+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build relwithdebinfo -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.jvIwrReDhY --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-relwithdebinfo
Configuring dependencies for platform tools
[2 ymakes processing] [7694/7694 modules configured] [2868/4128 modules rendered]
[2 ymakes processing] [7694/7694 modules configured] [4116/4128 modules rendered]
[2 ymakes processing] [7694/7694 modules configured] [4128/4128 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [7700/7700 modules configured] [4128/4128 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES
[... successful [AR]/[LD]/[PY]/[CP]/[CF]/PREPARE progress entries omitted; only the FAILED build actions are listed below ...]
| 2.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3
| 5.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part3/ydb-tests-fq-yt-kqp_yt_file-part3
| 5.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a
|18.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/moto/bin/moto_server
|19.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression
|21.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/benchmarks/template/ut/ydb-library-benchmarks-template-ut
|22.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/benchmarks/report/ut/ydb-library-benchmarks-report-ut
|24.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part11/ydb-tests-fq-yt-kqp_yt_file-part11
|24.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part2/ydb-tests-fq-yt-kqp_yt_file-part2
|26.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api
|28.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/example/ydb-tests-example
|28.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud
|30.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests
|30.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption
|30.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut
|31.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe
|31.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc
|31.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large
|31.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode
|31.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless
|31.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane
|31.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge
|32.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility
|33.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/tools/yqlrun/yqlrun
|37.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun
$(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/fixtures/libpy3tests-library-fixtures.global.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/libpy3s3_recipe.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |39.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lua/libcontrib-libs-lua.a |37.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |36.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |36.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yajl/libcontrib-libs-yajl.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |38.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/http-parser/libcontrib-restricted-http-parser.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |38.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/accessservice/libclient-nc_private-accessservice.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/service/libtx-limiter-service.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |38.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_compare/libpy3benchmarks-runner-result_compare.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |39.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_0b6bc206b470900b0b94249ade.o |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |39.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_convert/libpy3benchmarks-runner-result_convert.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/usage/libtx-limiter-usage.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |39.1%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |39.2%| RESOURCE $(sbr:4966407557) |39.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |39.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |39.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |39.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/hydra/version.cpp |39.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |39.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |39.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |39.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |39.5%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_partition_reader.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |39.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |39.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |40.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |40.0%| RESOURCE $(sbr:770480022) |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |40.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |40.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |40.2%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |40.4%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |40.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |40.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_e66920085df69f6f7e41547063.o |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/libpy3oltp_workload.global.a |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |40.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |40.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/libyt-client-arrow.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |40.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so |40.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |40.8%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose |40.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so |40.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |40.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/formats/libyt-client-formats.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |40.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so |40.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |40.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_6e0da74b1512d0ffe19c5dc500.o |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |40.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |40.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/workload/libpy3stress-simple_queue-workload.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |41.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |41.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/k8s_api/libpy3tools-cfg-k8s_api.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |41.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |41.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |41.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/db_driver_state/libimpl-ydb_internal-db_driver_state.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/value_helpers/libimpl-ydb_internal-value_helpers.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |41.5%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a 
|41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |41.9%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/{recipes-docker_compose-bin.final.pkg.fake ... library/recipes/docker_compose/bin/docker-compose} |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |42.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part14/ydb-library-yql-tests-sql-dq_file-part14 |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |42.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |42.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part0/ydb-library-yql-tests-sql-hybrid_file-part0 |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/rows/libformats-arrow-rows.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |42.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part7/ydb-library-yql-tests-sql-hybrid_file-part7 |42.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part6/ydb-library-yql-tests-sql-dq_file-part6 |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |42.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part16/ydb-library-yql-tests-sql-dq_file-part16 |42.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part9/ydb-library-yql-tests-sql-hybrid_file-part9 |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |42.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part12/ydb-library-yql-tests-sql-dq_file-part12 |42.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part18/ydb-library-yql-tests-sql-dq_file-part18 |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |42.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part0/ydb-library-yql-tests-sql-dq_file-part0 |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |42.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part5/ydb-library-yql-tests-sql-dq_file-part5 |42.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part2/ydb-library-yql-tests-sql-hybrid_file-part2 |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |42.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part13/ydb-library-yql-tests-sql-dq_file-part13 |42.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part4/ydb-library-yql-tests-sql-dq_file-part4 |42.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part2/ydb-library-yql-tests-sql-dq_file-part2 |42.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part6/ydb-library-yql-tests-sql-hybrid_file-part6 |42.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |42.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part1/ydb-library-yql-tests-sql-hybrid_file-part1 |42.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part19/ydb-library-yql-tests-sql-dq_file-part19 |42.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part5/ydb-library-yql-tests-sql-hybrid_file-part5 |42.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part8/ydb-library-yql-tests-sql-hybrid_file-part8 |42.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part3/ydb-library-yql-tests-sql-dq_file-part3 |42.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part9/ydb-library-yql-tests-sql-dq_file-part9 |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |42.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part11/ydb-library-yql-tests-sql-dq_file-part11 |42.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part15/ydb-library-yql-tests-sql-dq_file-part15 |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |42.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part4/ydb-library-yql-tests-sql-hybrid_file-part4 |42.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part8/ydb-library-yql-tests-sql-dq_file-part8 |42.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part1/ydb-library-yql-tests-sql-dq_file-part1 |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |42.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part10/ydb-library-yql-tests-sql-hybrid_file-part10 |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |42.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/libcore-persqueue-codecs.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/partition_key_range/libcore-persqueue-partition_key_range.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/purecalc/libcore-persqueue-purecalc.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |42.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cblas/libcontrib-libs-cblas.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |43.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part1/liblibs-clapack-part1.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |43.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |43.3%| PREPARE $(JDK17-472926544) |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/libpy3tests-tools-ydb_serializable.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/langver/libessentials-public-langver.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |43.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/libpy3tools-ydb_serializable-replay.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |43.7%| PREPARE $(JDK_DEFAULT-472926544) |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/tz/libpublic-udf-tz.a |43.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3/libv1-proto_parser-antlr3.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |43.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |44.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/public-sdk-cpp-tests-integration-sessions_pool |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |44.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/library/cpp/tld/tld.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/global_plugins/libydb-library-global_plugins.a |44.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |44.1%| PREPARE $(WITH_JDK17-sbr:7832760150) |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |44.2%| PREPARE $(CLANG-1922233694) |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |44.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/ydb-public-sdk-cpp-tests-integration-sessions |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |44.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |44.5%| PREPARE $(WITH_JDK-sbr:7832760150) |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signals/libydb-library-signals.a |44.5%| PREPARE $(GDB) |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/lib/libpy3tests-sql-lib.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/lib/libpy3tests-datashard-lib.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask-Cors/py3/libpy3python-Flask-Cors-py3.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/config/liblibrary-cpp-config.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |44.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/jemalloc/libcpp-malloc-jemalloc.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |45.0%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |45.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cyson/libpy3library-python-cyson.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cyson/libpy3library-python-cyson.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a |45.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.global.a |45.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_3d6916930a438b51675ef6dda7.o |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |45.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_64cecb639c5f85fbf868097a08.o |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |45.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_93dc3386250916dfae1ecb9b13.o |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |45.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/lib/libpy3olap-load-lib.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/fbs/libclient-arrow-fbs.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |45.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2492aafb6862566a2398c9f27e.o |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |45.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_c740f52ec3a04fe6a3985bed0b.o |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |45.7%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lua/liblibrary-cpp-lua.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/column_converters/libyt-library-column_converters.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |45.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.global.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libf2c/libcontrib-libs-libf2c.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/signals/libyt-library-signals.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.global.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_endpoints/libclient-impl-ydb_endpoints.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/kqp_session_common/libimpl-ydb_internal-kqp_session_common.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/plain_status/libimpl-ydb_internal-plain_status.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytz/py3/libpy3python-pytz-py3.global.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/thread_pool/libimpl-ydb_internal-thread_pool.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/grpc_connections/libimpl-ydb_internal-grpc_connections.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_stats/libclient-impl-ydb_stats.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |46.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.global.a |46.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part17/ydb-library-yql-tests-sql-dq_file-part17 |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/procfs/libyt-library-procfs.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |46.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part10/ydb-library-yql-tests-sql-dq_file-part10 |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |46.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_363b5875cc5c5e5745458b16b8.o |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |46.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_e2a089b95d9316f6e26025d3e3.o |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.global.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |46.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/formats/libyt-library-formats.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |46.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/histogram/libessentials-core-histogram.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/langver/libessentials-core-langver.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |46.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part2/liblibs-clapack-part2.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a |46.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_3df021aac8504049c53286aea0.o |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part7/ydb-library-yql-tests-sql-dq_file-part7 |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/test_meta/libpy3tests-library-test_meta.global.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/c/cyson/liblibrary-c-cyson.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |46.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |46.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/util/draft/libutil-draft.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.global.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/make_request/libimpl-ydb_internal-make_request.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/session_pool/libimpl-ydb_internal-session_pool.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/skiff_ext/libyt-library-skiff_ext.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a |47.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part3/ydb-library-yql-tests-sql-hybrid_file-part3 |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |46.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/libpy3contrib-python-websocket-client.global.a |46.9%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/packaging/py3/objcopy_6d9ef78679643f8cbf3879ecf6.o |46.9%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/packaging/py3/objcopy_50cfa15c298013619cf03bf14e.o |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |47.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/packaging/py3/objcopy_6150f31900b6d744b36b62e2ca.o |47.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/packaging/py3/objcopy_e5dd2f6d7a954861412da5f236.o |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_253d734e8c901d319d84fcc6e9.o |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |47.1%| PREPARE $(CLANG-874354456) |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part18/ydb-tests-fq-yt-kqp_yt_file-part18 |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests 
|47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/benchmarks/runner/ydb-library-benchmarks-runner |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/minidumps/ydb-tests-functional-minidumps |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part1/ydb-tests-fq-yt-kqp_yt_file-part1 |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part12/ydb-tests-fq-yt-kqp_yt_file-part12 |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |47.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part8/ydb-tests-fq-yt-kqp_yt_file-part8 |47.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part4/ydb-tests-fq-yt-kqp_yt_file-part4 |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |47.6%| PREPARE $(CLANG16-1380963495) |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |47.8%| [BI] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/buildinfo_data.h |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |47.8%| PREPARE $(CLANG18-1866954364) |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |47.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/library/cpp/tld/tld.cpp |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.global.a |47.9%| [PY] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/objcopy_cc746dcd5efcce0f01bb1821b3.o |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/responses/py3/libpy3python-responses-py3.global.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prettytable/py3/libpy3python-prettytable-py3.global.a |47.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/objcopy_b783a1a2aacb855daa1e55fad6.o |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |47.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_10b0cfa01297f7d7392eb4d9e4.o |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/utils/libyt-fmr-utils.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/tools/ytrun/lib/libtools-ytrun-lib.a |47.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/objcopy_0ade7a5662c6292edc3a8de02f.o |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_15e284a8ecb30c90903e842e70.o |48.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svn_interface.c |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |48.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_6887bde1dc99f5c5c2f0922842.o |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_e0aef87c4bf15cfdc957f4bdd1.o |48.0%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/benchmarks/template/ut/objcopy_1fad4af9c815b6550d6e6a0f07.o |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/template/libpy3library-benchmarks-template.global.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/libyt-fmr-request_options.a |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part2/objcopy_4bacf81686789141de058648b6.o |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/objcopy_5accfe00d45fb7ebcc30e116b2.o |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/impl/libfmr-worker-impl.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/interface/libfmr-table_data_service-interface.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/py3/libpy3python-moto-py3.global.a |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/objcopy_e2cd022168ff179d1441f5d3df.o |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/secret_masker/dummy/liblib-secret_masker-dummy.a |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_12d01741952bd4afa836364d84.o |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.global.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/proto_helpers/libcoordinator-interface-proto_helpers.a |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_2efdf95387a81f55cf9c81071a.o |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part18/objcopy_a5eef60051fd0c050ae83c0024.o |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part18/objcopy_c42fb4390a9c894aa49ea2a69e.o |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part18/objcopy_33d0921bf6a85e1d4516175d7f.o |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_5d73baff4bb68923ddbe5f4fcd.o |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_242486256e1af973cd1d5376d1.o |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_87b299e07b15c86f4f50f458ef.o |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_f3c323ef80ada193284f036d44.o |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_af18efc2f04dd1af5ca802c329.o |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_04f56802b68450abc8421282d0.o |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |48.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svnversion.cpp |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_504b845d57f1a23561e970de61.o |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/objcopy_bbc17476b53bd73e13af4503a9.o |48.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.global.a |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_1574e8a5a6c530c7bfd6378c4d.o |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_95b3eecc97c453f0c55c456659.o |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_4352b8b3e3cf61532c865b371b.o |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_f4b44a5d280d0f27f5ffd278e8.o |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_08a4b5d38a76e21591db0c3424.o |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_53073eb93c76466fca8f474c5f.o |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_fe15eb83a42d9d70d347bbba65.o |48.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_643fa2679e88d9b2d33558b050.o |48.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/daf02fd86bb7e2296f1437ae1f_raw.auxcpp |48.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part1/objcopy_c7be705253ccbceac638376892.o |48.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/objcopy_efd352795aee39d7ac6e163a2d.o |48.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.global.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.global.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |48.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part12/objcopy_0885a91ed37500f6bb81f1931d.o >> test.py::py2_flake8 [GOOD] |48.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part12/objcopy_3bc621ae87f3fb82bb779540f9.o |48.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/build_info/build_info_static.cpp |48.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part8/objcopy_83b1a25e92dbaee4df2c6ad186.o |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |48.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_b9aaa278b10ed44e5645b3ef2f.o |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part4/objcopy_87995dbaa194805fe31f9e5f7d.o |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a 
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part4/objcopy_3f3fc81ae29f48a257b4fec2f1.o |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part4/objcopy_acf35ed1b35012a5853cd322cd.o |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_065e9244d685c2b8f0ab66e414.o |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/include/libclient-ydb_topic-include.a |48.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_17cef60c2dd0eb7ea46181ba87.o |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a >> test_tpcds.py::flake8 [GOOD] >> test_tpch_spilling.py::flake8 [GOOD] |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_461999da7ba13deab5689c18ec.o |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |48.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_e4166f3d104a6751b45e7e712f.o |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.global.a |48.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_816e2dba53f55d924139cdb3c5.o |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a >> test.py::flake8 [GOOD] |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/clickbench/ydb-tests-functional-clickbench |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/liblibs-breakpad-src.a |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests >> test_copy_table.py::flake8 [GOOD] |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part8/objcopy_ca55b9f88a84be7f3ad770ebde.o |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part8/objcopy_98dd3511516854f31ec9936dfb.o |48.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_5b5c3367c789898aa5a6cae866.o |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/objcopy_b5b36403e069f48d06f8367722.o >> test.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a >> run_tests.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_4826ee2207124da1bc398e3bd8.o |48.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_8e57113197bb359e3999b04aab.o |48.5%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/objcopy_533f06087e794c7af638ea75dc.o |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/objcopy_c693478edc1220e7a9143567d1.o >> test.py::flake8 [GOOD] >> test_kqprun_recipe.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/objcopy_342e8590e41686b18307d054a9.o |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_48a08121f0a68da2f2666b0341.o |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_791e2f78c18891d943ecce5e41.o |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part1/objcopy_aa5ee70477f6b00d816e52a094.o |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part1/objcopy_ea52d3517fdcfad7a664adc58e.o >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> parser.py::flake8 [GOOD] |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part12/objcopy_8684f497a81f0f787ed20030eb.o |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |48.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_7211c23d9494c46f0f60063e9e.o |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/client/linux/libsrc-client-linux.a >> test.py::py2_flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/d78d0f74a3f72be1016c0cf8cf_raw.auxcpp |48.6%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_2aa1916d45dca98014edb3d732.o |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |48.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |48.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/objcopy_a679e199a2b1777c233f47e487.o |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |48.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_e2637cea0f2e4db109b364a246.o |48.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/objcopy_a8f03d2dae3ac1934004e03a24.o >> test_alter_ops.py::flake8 [GOOD] |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |48.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/objcopy_b88cb53e9b27ab83c5253c0470.o |48.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_3ddbad334a37a829b3772ddb05.o |48.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/minidumps/objcopy_e740d8bfaebae830aaeb4ace59.o |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/minidumps/objcopy_077abccc5552b4ff2e53b07653.o >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] >> runner.py::flake8 [GOOD] |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/tool |48.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/minidumps/objcopy_be727953c626d90e9f80dacc0b.o |48.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_e2acb41e7099c0db4fe54a1587.o |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.global.a |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_7c81cbfa6b5ce112674cb0a849.o >> conftest.py::flake8 [GOOD] >> test_clickhouse.py::flake8 [GOOD] >> test_greenplum.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> test_mysql.py::flake8 [GOOD] >> test_postgresql.py::flake8 [GOOD] >> test_ydb.py::flake8 [GOOD] |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |48.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 >> test.py::flake8 [GOOD] |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/library/recipes/docker_compose/docker_compose |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests >> test.py::flake8 [GOOD] >> tablet_scheme_tests.py::flake8 [GOOD] >> test_update_script_tables.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> 
test.py::flake8 [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |48.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/large/flake8 >> test_tpch_spilling.py::flake8 [GOOD] |48.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |49.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 >> test.py::flake8 [GOOD] |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/token_accessor_mock/libpy3recipe.global.a |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test >> __main__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime_with_service_name.py::flake8 [GOOD] >> select_positive_with_service_name.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |49.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/flake8 >> test_copy_table.py::flake8 [GOOD] |49.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/libpy3simple_queue.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator_grpc/libpy3solomon_recipe_grpc.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/statistics_workload/libpy3statistics_workload.global.a >> test.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> test_tpch_import.py::flake8 [GOOD] |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/libpy3local_ydb.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/libpy3olap-docs-generator.global.a |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/objcopy_083605b223ce507d0fef919d0d.o |49.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/ydb_recipe/flake8 >> __main__.py::flake8 [GOOD] |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_d23500649301df2a8de48ba70d.o >> test_quoting.py::flake8 [GOOD] |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_1ab2a5a6dd84a6c9ff5d5c50b0.o |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_cf3971576aced18377e99f5367.o |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a |49.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_dump_restore.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test_liveness_wardens.py::flake8 [GOOD] |49.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/run_tests/flake8 >> run_tests.py::flake8 [GOOD] |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_c65a9d5efe13dc05c1466090ba.o |49.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/log/tests/flake8 >> test_workload.py::flake8 [GOOD] |49.1%| 
[TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/regex/libv1-lexer-regex.a |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_3bdea7737a87c43bfaa0aaf4c3.o >> test_clickbench.py::flake8 [GOOD] >> hive_matchers.py::flake8 [GOOD] >> test_diff_processing.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> test_kill_tablets.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/clickbench/objcopy_7e9eb454674c6bbfd0ed60bcc8.o |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_278b1a63a14648a80c4b930adb.o |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_6b37760fb6a28054d0feafd61d.o |49.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] >> test_query_cache.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> overlapping_portions.py::flake8 [GOOD] >> test_account_actions.py::flake8 [GOOD] >> test_commit.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] >> test_queue_tags.py::flake8 [GOOD] >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/clickbench/objcopy_39c06a63f604e38efe9b035b16.o |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/common/libpy3tests-stress-common.global.a |49.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_5923b362516b6632b9769a5db2.o |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_9be8b6745d0fa150928bab4206.o |49.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/tests/flake8 >> test.py::flake8 [GOOD] |49.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_e68ca1a2fa9943132c020ae028.o |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8e19d47784789c55156c57f816.o |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/libpy3stress-olap_workload-workload.global.a |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/clickbench/objcopy_2a49c017ebe910ff77ce8d5701.o |49.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/docs/generator/flake8 >> parser.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> select_positive_with_schema.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] |49.3%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/sqs/common/objcopy_0a1f127d9343562caddfbacf79.o |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_f9b0feecd0e36f08cbf5c53562.o |49.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 >> test.py::py2_flake8 [GOOD] |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_178e64ce5db822fc6aa8b3e608.o |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_b866963286293af0b6f2139fed.o |49.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.so |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_c623700776b43ee95ec93c56f9.o |49.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/flake8 >> test_workload.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |49.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a >> conftest.py::flake8 [GOOD] >> test_alter_compression.py::flake8 [GOOD] >> test_alter_tiering.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_read_update_write_load.py::flake8 [GOOD] >> test_scheme_load.py::flake8 [GOOD] >> test_simple.py::flake8 [GOOD] |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/check/libv1-lexer-check.a |49.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/runner/flake8 >> runner.py::flake8 [GOOD] |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/template/ut/objcopy_5a8889d4d9e739ede771f4b682.o |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part5/ydb-tests-fq-yt-kqp_yt_file-part5 |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/report/libpy3library-benchmarks-report.global.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/proto_helpers/libfmr-request_options-proto_helpers.a |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a |49.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/analytics/flake8 >> test_ydb.py::flake8 [GOOD] |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part11/objcopy_02dd3cf99405969eeee5d78c5c.o |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |49.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/report/ut/objcopy_48bd039582c500fdc3a1e2dbdc.o |49.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 >> test.py::flake8 [GOOD] |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part2/objcopy_e76fa498393ffb7b6a163e0941.o |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/report/ut/objcopy_be0e94f199eb5bfc57d1bab945.o |49.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 >> test.py::flake8 
[GOOD] |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/38dcacd12926621ca72e30ce1b_raw.auxcpp |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7bfd03a31f5e230607792f10cc.o |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_25d3afea4b7778a202a80125cb.o |49.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/mdb_mock/flake8 >> __main__.py::flake8 [GOOD] |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_4f92526e13553482736b942b2c.o |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_b08299d456f3448b368e814cb8.o |49.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7eade8c49389813f8c36b72b5b.o |49.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_1a1e300767b552f4c13c3295d0.o |49.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.global.a |49.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |49.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 >> test.py::flake8 [GOOD] |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_fdd48fc620c42f480ae38b77f5.o |49.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |49.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> test.py::flake8 [GOOD] |49.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 >> test.py::flake8 [GOOD] |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_fcc835b175560db56b04f51f44.o >> test_postgres.py::flake8 [GOOD] |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_3209cda00462f2963f3cbbc912.o |49.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/flake8 >> test_tpch_import.py::flake8 [GOOD] |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_dae5a42f53b4f98bf1b9fd8118.o |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_1339ee5ef04af3a5a49d43a6c9.o |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_7a185a4b35de7733fde931d298.o |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_6b8c453743f8fd2c5380af70c6.o |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_9f43001a877b9e371fe700c81d.o |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |49.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] |49.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |49.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/flake8 >> test_dump_restore.py::flake8 [GOOD] |49.6%| [PY] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/objcopy_93665db601a12d4842de4565e2.o |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/objcopy_d2e759e2d0ff1243166a3bc7d9.o >> common.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/template/ut/objcopy_13f09ffac821f20285adf4530d.o |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_7406de026bf25e30e96a88517d.o |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_69bb4174ba5b22bacbabacd799.o |49.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/flake8 >> test_tpch.py::flake8 [GOOD] |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_b4d1a41a4041b6372d2a384279.o |49.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/flake8 >> test_workload.py::flake8 [GOOD] |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |49.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |49.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |49.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |49.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_0664e2ab2eb37ae9f02538e483.o |49.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 >> test.py::flake8 [GOOD] |49.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_6cc8d554301fc8d647fa6e6c7c.o |49.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] >> test_generator.py::flake8 [GOOD] >> test_init.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |49.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 >> test.py::flake8 [GOOD] |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |49.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/objcopy_3fdb568d483b57acc8e627f8c2.o |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/udfs/common/knn/test/ydb-library-yql-udfs-common-knn-test |49.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/oom/flake8 >> overlapping_portions.py::flake8 [GOOD] |49.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |49.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_49a1ca9559288648fba9cf7b65.o |49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |49.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/objcopy_51562f83ff52d1ceaac0c36a08.o |49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part9/ydb-tests-fq-yt-kqp_yt_file-part9 |49.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 >> test.py::flake8 [GOOD] |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/build/plugins/lib/test_const/libpy3plugins-lib-test_const.global.a |49.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/recipes/docker_compose/lib/libpy3recipes-docker_compose-lib.global.a |49.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/streaming/flake8 >> test_join.py::flake8 [GOOD] |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_45b6981aed17dda33d43217f52.o |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_0035b673555f394234ae284e25.o >> test_cms_erasure.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |49.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_9818d2b70aad7db98a0f9c044c.o |49.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_367e2bc5d83faa0907a06d2976.o |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/libpy3stress-oltp_workload-workload.global.a |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/objcopy_fe9c8c25e6c570097a9d0c06f9.o |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_0446f521b26a2e8128f94ac50f.o >> test_cp_ic.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> test_retry_high_rate.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/docker_compose/libpy3library-recipes-docker_compose.global.a |49.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> test.py::flake8 [GOOD] |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/library/recipes/docker_compose/objcopy_267d7640d8d6aa7b2c189e5495.o |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/type/libpy3oltp_workload-workload-type.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/client/libfmr-coordinator-client.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/runner/libpy3benchmarks-runner-runner.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/impl/libfmr-job-impl.a >> test_compatibility.py::flake8 [GOOD] >> test_followers.py::flake8 [GOOD] >> test_stress.py::flake8 [GOOD] |50.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/flake8 >> test_simple.py::flake8 [GOOD] |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_service/file/libfmr-yt_service-file.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/libpy3ydb_configure.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/libpy3olap_workload.global.a |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp >> test_dml.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> data_correctness.py::flake8 [GOOD] >> data_migration_when_alter_ttl.py::flake8 [GOOD] >> tier_delete.py::flake8 [GOOD] >> ttl_delete_s3.py::flake8 [GOOD] >> ttl_unavailable_s3.py::flake8 [GOOD] >> unstable_connection.py::flake8 [GOOD] |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part2/objcopy_b4b56d8f58bdf44e0925188ba7.o |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_2194854d9f8cbb3e0ba798b861.o >> alter_compression.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> base.py::flake8 [GOOD] |50.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_sql.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_mixed.py::flake8 [GOOD] >> test_inserts.py::flake8 [GOOD] >> test_kv.py::flake8 [GOOD] |50.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/aba998449c2518e3272d8e87fb_raw.auxcpp |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a |50.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_b34c6a8a5501db208eebc5d8e4.o |50.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_e32003454342267c2263935765.o >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_cca8dcd66462c9ca3c57fcb78e.o |50.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_2b682e146a665bfa19210b0fd9.o |50.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_9a3dabea847c21e0b4fa4cda26.o |50.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part5/objcopy_109c6973d94beeca07e2ddef0d.o |50.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] |50.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_277b7e8f79021687bec95be8db.o |50.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_afdf6d60c4f76ae91a235d460b.o |50.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part5/objcopy_e262577a576bedd9fad641c216.o |50.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/run_tests/run_tests >> conftest.py::black [GOOD] >> test_join.py::black [GOOD] |50.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_0359848ae21601186c5b0d9873.o |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a |50.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part5/objcopy_4b35365d1200804ba15331387f.o |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a >> __main__.py::black [GOOD] >> test.py::flake8 [GOOD] |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libpy3contrib-libs-googleapis-common-protos.global.a |50.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/solomon/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/runner/runner |50.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] |50.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/flake8 >> test_init.py::flake8 [GOOD] |50.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 >> test.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_multinode_cluster.py::flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] |50.2%| 
[AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.global.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/text/libv1-complete-text.a |50.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_bfa810e70cd1de18c5d4a18a62.o >> test.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_transform.py::flake8 [GOOD] >> test_leader_start_inflight.py::flake8 [GOOD] >> conftest.py::black [GOOD] >> test_clickhouse.py::black [GOOD] >> test_greenplum.py::black [GOOD] >> test_join.py::black [GOOD] >> test_mysql.py::black [GOOD] >> test_postgresql.py::black [GOOD] >> test_ydb.py::black [GOOD] |50.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5865a174a6c25ca1a2d6386702.o |50.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5db899a01c2ec6f53648af6840.o |50.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 >> test.py::flake8 [GOOD] |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |50.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 >> test.py::flake8 [GOOD] |50.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.global.a |50.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_compare/result_compare |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |50.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/report/ut/flake8 >> test.py::flake8 [GOOD] |50.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/compatibility/flake8 >> test_stress.py::flake8 [GOOD] |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |50.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/flake8 >> unstable_connection.py::flake8 [GOOD] |50.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_convert/result_convert >> test_stability.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |50.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |50.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/objcopy_dcbdf62672440a626e79a64e14.o |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |50.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tests/common/udf_test/libpy3tests-common-udf_test.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |50.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/flake8 >> test_dml.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |50.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/5a2f230528097042fdaf726fed_raw.auxcpp |50.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/statistics_workload/flake8 >> __main__.py::flake8 [GOOD] |50.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/test/objcopy_53b63c2930dedc40e3c6afa8f9.o |50.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/flake8 >> base.py::flake8 [GOOD] |50.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part9/objcopy_cd9f2557f3b556bfe912374bf1.o |50.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_6a5c78aa9f679a0920be5264fe.o |50.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |50.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_96b8686cd075e874d95d4aa5c5.o |50.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part15/ydb-tests-fq-yt-kqp_yt_file-part15 |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |50.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/mixedpy/flake8 >> test_mixed.py::flake8 [GOOD] |50.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a >> test_s3.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_http_api.py::flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test_restarts.py::flake8 [GOOD] |50.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] |50.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_b031a661ba244dffa03ab0c7ec.o |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp 
|50.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/flake8 >> test_kv.py::flake8 [GOOD] |50.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part9/objcopy_87406a377c14b43b64a78bf3ab.o |50.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part9/objcopy_4090f9cdbd079e13655510cb47.o |50.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |50.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_d0255dda539959b69d421868a2.o |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.a >> __main__.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_config_with_metadata.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_encryption.py::flake8 [GOOD] >> test_distconf.py::flake8 [GOOD] >> test_generate_dynamic_config.py::flake8 [GOOD] |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |50.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |50.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part10/ydb-tests-fq-yt-kqp_yt_file-part10 |50.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> test.py::flake8 [GOOD] |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/udfs/common/roaring/test/ydb-library-yql-udfs-common-roaring-test |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a >> __main__.py::flake8 [GOOD] |50.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/streaming/black >> test_join.py::black [GOOD] |50.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/driver/nemesis |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ydb-tests-olap >> test_alloc_default.py::flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |50.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/mdb_mock/black >> __main__.py::black [GOOD] |50.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/oltp_workload |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |50.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/flake8 >> __main__.py::flake8 [GOOD] |50.7%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD] |50.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/flake8 >> test.py::flake8 [GOOD] |50.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |50.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |50.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 >> test.py::flake8 [GOOD] |50.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |50.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::flake8 [GOOD] |50.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/flake8 >> test_transform.py::flake8 [GOOD] |50.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/analytics/black >> test_ydb.py::black [GOOD] |50.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/flake8 >> __main__.py::flake8 [GOOD] |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |50.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] |50.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_00c87b13e2f685811a9825079d.o >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/dc048c91e67372877fc6ad2dfc_raw.auxcpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/color/libinteractive-highlight-color.a |50.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |50.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a >> compare.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_insert_restarts.py::flake8 [GOOD] |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/antlr4/libv1-complete-antlr4.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |50.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.global.a >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/ydb/flake8 >> test_stability.py::flake8 [GOOD] |50.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/libcomplete-name-service.a >> runner::import_test [GOOD] |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_c02c3d9f840d02af9fad858a55.o |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/union/libname-service-union.a |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/syntax/libv1-complete-syntax.a |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a >> integrations_test.py::flake8 [GOOD] |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |50.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/solomon/ydb-library-yql-tests-sql-solomon |51.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/template/ut/objcopy_e1ea7e1a6b192ed8d7f4249c97.o |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/flake8 >> test_s3.py::flake8 [GOOD] |51.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/objcopy_4246ee6b3505ab22753eb44ce7.o |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/yql_facade_run/libessentials-tools-yql_facade_run.a |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/tools/yqlrun/yqlrun.cpp |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/interface/libfmr-job_factory-interface.a |51.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/objcopy_ab18037bd07c07d8fa01a61eaa.o |51.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider >> test.py::py2_flake8 [GOOD] >> result_convert::import_test [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/load/flake8 >> 
test_tpch.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/flake8 >> __main__.py::flake8 [GOOD] |51.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_52e86d5ee8fadefdbb415ca379.o |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/libpy3solomon_recipe.global.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.global.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/libfmr-coordinator-interface.a |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/supp/flake8 >> __main__.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/flake8 >> test_generate_dynamic_config.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/driver/flake8 >> __main__.py::flake8 [GOOD] |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/lib/libtools-yqlrun-lib.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/13360e4ecdf34efe6c3a817a44_raw.auxcpp >> result_compare::import_test [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/flake8 >> __main__.py::flake8 [GOOD] |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a |51.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.so >> test_quota_exhaustion.py::flake8 [GOOD] |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/run_tests/libpy3benchmarks-runner-run_tests.global.a |51.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part15/objcopy_279a6cd6acd7716c8a768f3418.o >> tpc_tests.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |51.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part15/objcopy_c75b78deb8907135e78b5d4be7.o |51.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part15/objcopy_ffc809e226599b896616cbfb31.o |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/http/libtools-yqlrun-http.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/libfmr-table_data_service-local.a |51.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |51.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part16/ydb-tests-fq-yt-kqp_yt_file-part16 |51.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_245adf3e28f56e6467e034d9f2.o >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |51.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_7648c2519d02b8456f762efc4b.o |51.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_31d605682329607481eb568ed0.o |51.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |51.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part10/objcopy_3069b7e454a03cfe68ed3d8f44.o >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_ttl.py::flake8 [GOOD] |51.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part10/objcopy_3cfb889eb290eb4c98f4393b2a.o |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/cfg/bin/flake8 >> __main__.py::flake8 [GOOD] |51.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part14/ydb-tests-fq-yt-kqp_yt_file-part14 |51.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part10/objcopy_1d621e696eaa228c9c1bb7a609.o |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_common.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |51.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/test/objcopy_0339b02b57ddfdbe2a44ffffe6.o |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/fmr_tool_lib/libyt-fmr-fmr_tool_lib.a |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_compare/flake8 >> compare.py::flake8 [GOOD] |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |51.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part6/ydb-tests-fq-yt-kqp_yt_file-part6 |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |51.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_8120ef49e7e653ed0601604313.o >> test_example.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> docker_wrapper_test.py::flake8 [GOOD] |51.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_f93c60b04a0499f2ec6880591a.o |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_f05ead59375a9db120b95dd730.o |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/0de346a5cadde55664f85ed317_raw.auxcpp |51.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/objcopy_be85b0beafcfe4a7f6fd6c6dce.o |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/library/benchmarks/runner/runner/import_test >> runner::import_test [GOOD] |51.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_1aeeb50f676472f975830c135d.o |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/objcopy_81ae81681ce2388a653cfa5ba3.o >> test.py::flake8 [GOOD] |51.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_f928a40774b17a9d6cd7cabd2c.o |51.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_e7477203b27fa0321cf18fd7ee.o |51.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |51.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_b8d63b589074145793d63c27a3.o |51.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/objcopy_48e09f84949dd34b82c51f21a3.o |51.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/objcopy_ce63bab0f89a8715a42271a26a.o |51.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |51.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_8685c3ae88e5169a5acffc7bc4.o |51.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_d191482d8b66f1c03ea8df56d3.o |51.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/bin/objcopy_9509442a50bd9d1393fa0d54e4.o |51.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff |51.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_ff581f3cff717ab223922f0cd8.o |51.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/objcopy_3b212908932716bae8a8e38b2c.o |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/flake8 >> integrations_test.py::flake8 [GOOD] |51.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/objcopy_bcf2142e31bf537964dc063d11.o |51.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.so >> test.py::flake8 [GOOD] |51.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/objcopy_1c95ef09a97797b541386e59f9.o |51.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_bcbbd2d8f2367d5f3ed5199234.o |51.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_ef822f612b696eb514a5565056.o |51.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_2cc418e8604751e5b8f9029a81.o |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a |51.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_ad84868df819de98481440cf0a.o |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario >> conftest.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_big_state.py::flake8 [GOOD] >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test_disposition.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] >> test_kill_pq_bill.py::flake8 [GOOD] >> test_mem_alloc.py::flake8 [GOOD] >> test_metrics_cleanup.py::flake8 [GOOD] >> test_pq_read_write.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> 
test_read_rules_deletion.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] >> test_restart_query.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_query.py::flake8 [GOOD] >> test_s3.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> test_yq_streaming.py::flake8 [GOOD] |51.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_d3af02c7d57ea2cbbe5d381baa.o |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/849c58233edc33539cbeb93a31_raw.auxcpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |51.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |51.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_bf578b7161cc94bf18488d04ca.o |51.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/objcopy_d436314897a5ee0efaaf4fb182.o |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_convert/import_test >> result_convert::import_test [GOOD] >> __main__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |51.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/objcopy_646bfdd69de974aac5b70bb33b.o |51.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |51.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/objcopy_2120ba1c181b59ff8129e88f2e.o |51.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/objcopy_802d0b4b060209b867e0536698.o |51.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/objcopy_72adec4fc4bd293cc59aa677e3.o |51.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/objcopy_8a8aa0e0ce826c3e36d1e30b35.o |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_compare/import_test >> result_compare::import_test [GOOD] |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/interface/libfmr-job-interface.a |51.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_b306c2955ce13e6db6cae73363.o |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/flake8 >> test_quota_exhaustion.py::flake8 [GOOD] |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/flake8 >> tpc_tests.py::flake8 [GOOD] |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_service/interface/libfmr-yt_service-interface.a |51.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_b8aa61f402be805d2e3e9e75a2.o |51.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part19/ydb-tests-fq-yt-kqp_yt_file-part19 |51.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |51.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 >> test.py::flake8 [GOOD] >> kikimr_config.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |51.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |51.7%| [LD] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part13/ydb-tests-fq-yt-kqp_yt_file-part13 |51.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |51.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |51.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |51.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> scenario.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_case.py::flake8 [GOOD] |51.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] |51.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |51.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/s3_recipe/flake8 >> __main__.py::flake8 [GOOD] |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |51.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |51.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |51.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/flake8 >> test.py::flake8 [GOOD] |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |51.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_703c8e1d9a9a2b271b8b995a29.o |51.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_912038ceef7de48e0e15c25307.o |51.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/example/flake8 >> test_example.py::flake8 [GOOD] |51.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load >> test_break.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_service/impl/libfmr-yt_service-impl.a |51.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init |51.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/tools/sql2yql/sql2yql |51.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_1555e67a3dd43a3e7f09bf8eee.o |51.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/supp/ydb_supp >> run_tests::import_test [GOOD] >> test.py::flake8 
[GOOD] |51.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_488333b1ebd4c1d6d8ec5bcb8f.o |51.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part16/objcopy_f4569d2c2555a93f87ae2f2009.o |51.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_3db6af291678d4ac330517956a.o |51.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] |51.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part16/objcopy_ec2f15b1a14d65465c3cae8d0f.o |51.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part16/objcopy_9be1e63401a3f6c2a01ab4cded.o |51.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/objcopy_cf5836766ac30ca7ea957ce368.o >> __main__.py::flake8 [GOOD] >> test_split_merge.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] |51.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/objcopy_899316667b8914fe8ec3af85d9.o |51.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/objcopy_daba02a22b66dd174e40603586.o |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |51.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 >> test.py::flake8 [GOOD] |52.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part14/objcopy_1f723acf9a6fc746ce94291c30.o >> test.py::flake8 [GOOD] >> test_pdisk_format_info.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] >> test_tablet_channel_migration.py::flake8 [GOOD] |51.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part14/objcopy_727baeae5e218e21f568ecc4e1.o |51.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part14/objcopy_8da2516060605608346c9e106f.o |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/libpy3ydb_supp.global.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/fmr/libyt-gateway-fmr.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/impl/libfmr-job_factory-impl.a |52.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/objcopy_c9ab749ab3188a8582c5cefa5e.o |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a >> test_async_replication.py::flake8 [GOOD] |51.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 >> test.py::flake8 [GOOD] |51.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |51.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part6/objcopy_4ce393ed8b6973a57c63ca8cde.o |52.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part6/objcopy_dcbb27f61cfaa4bf818b6c9b63.o |52.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part6/objcopy_dfcd572cf2cb8f4b4d9da1f57b.o >> test_workload.py::flake8 [GOOD] |51.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |51.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |51.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/lib/flake8 >> test_s3.py::flake8 [GOOD] |51.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |51.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/tools/mdb_mock/libpy3recipe.global.a |51.9%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/packaging/py3/packaging/_manylinux.py.7ma7.yapyc3 |51.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table |51.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore |51.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/lib/libpy3functional-tpc-lib.global.a |51.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/mdb_mock/recipe |52.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part11/objcopy_38e2b04aa732673c0cd3290080.o |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |52.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_6403bfa5c5e35b29a21c73fb0e.o >> test_sql_streaming.py::flake8 [GOOD] |52.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |52.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/report/ut/objcopy_ba012fc69f99f2a3744b7ec6c7.o |52.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/token_accessor_mock/flake8 >> __main__.py::flake8 [GOOD] |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/proto/libyt-fmr-proto.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/simple/libcore-cbo-simple.a >> column_table_helper.py::flake8 [GOOD] >> range_allocator.py::flake8 [GOOD] >> s3_client.py::flake8 [GOOD] >> thread_helper.py::flake8 [GOOD] >> time_histogram.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_client.py::flake8 [GOOD] |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |52.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.so |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |52.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large >> conftest.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_ctas.py::flake8 [GOOD] >> test_yt_reading.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |51.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_2f7ac0f750374152d13c6bfbcf.o |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a |52.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_854d6cc7a0cc5cdd793cfc1e6d.o |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |52.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_a926d3332cb769ac3e6c9e6e37.o |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |52.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/packaging/py3/packaging/_tokenizer.py.7ma7.yapyc3 |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |52.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/flake8 >> kikimr_config.py::flake8 [GOOD] |52.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_bac05c8b5a79735451f58d9322.o |52.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 >> test.py::flake8 [GOOD] |52.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut >> test.py::py2_flake8 [GOOD] |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/statistics_workload/statistics_workload |52.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/packaging/py3/packaging/licenses/__init__.py.7ma7.yapyc3 |52.1%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/nemesis |52.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_cee1e02beaf827051149b5ca30.o |52.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/simple_queue/simple_queue |52.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/replay/flake8 >> __main__.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |52.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_36807918bd7a86c1ea37310c9c.o |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/709f125727d9ea4165df516509_raw.auxcpp |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |52.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_656baae3c1e24959f5bcc457d7.o |52.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 >> test.py::flake8 [GOOD] |52.1%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/cfg |52.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |52.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/packaging/py3/packaging/specifiers.py.7ma7.yapyc3 |52.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_0ab925f82bbba07bf3b749dc3c.o |52.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |52.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_5992d4831c5055a481712a2a80.o >> gen-report.py::flake8 [GOOD] |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |52.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/join/flake8 >> test_case.py::flake8 [GOOD] |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |52.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 >> test.py::flake8 [GOOD] |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |52.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/tests-datasource-clickhouse |52.1%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/statistics_workload |52.2%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |52.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_afb48e06933bdee6c5245db82e.o >> test_disk.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] |52.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part11/objcopy_d5735b31e5ad3671c3576c1948.o |52.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/solomon/flake8 >> test.py::flake8 [GOOD] |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |52.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/minidumps/flake8 >> test_break.py::flake8 [GOOD] |52.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/run_tests/import_test >> run_tests::import_test [GOOD] >> conftest.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] |52.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part19/objcopy_2aec57513fd9f6f64f3a0e90d1.o |52.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 >> test.py::flake8 [GOOD] |52.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] |52.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part19/objcopy_56d7c547c70f96d5b149d2aa13.o |52.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/token_accessor_mock/recipe |52.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_log_scenario.py::flake8 [GOOD] >> zip_bomb.py::flake8 [GOOD] >> test_serializable.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] >> test_sql_logic.py::flake8 [GOOD] >> test_stream_query.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |52.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/flake8 >> test_split_merge.py::flake8 [GOOD] |52.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part19/objcopy_bc45a031e0d092dc2b0f972904.o |52.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tools/solomon_emulator_grpc/flake8 >> __main__.py::flake8 [GOOD] |52.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/udf_resolver/udf_resolver |52.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part13/objcopy_464649b01e663c0fe0d55da704.o |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/olap/high_load/read_update_write.cpp |52.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/template/ut/flake8 >> test.py::flake8 [GOOD] |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/rescompiler/rescompiler >> test_actorsystem.py::flake8 [GOOD] >> test_schemeshard_limits.py::flake8 [GOOD] >> test_fifo_messaging.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> allure_utils.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> results_processor.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> ydb_cli.py::flake8 [GOOD] >> ydb_cluster.py::flake8 [GOOD] |52.5%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/yt/kqp_yt_file/part13/objcopy_3ebc265e84a27f9f2b9c0eb02b.o |52.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tools/solomon_emulator/recipe/flake8 >> __main__.py::flake8 [GOOD] |52.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part13/objcopy_71d64e26c0eaa50a3f15ac879c.o |52.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/flake8 >> test_tablet_channel_migration.py::flake8 [GOOD] >> test_bulkupserts_tpch.py::flake8 [GOOD] |52.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/flake8 >> test_async_replication.py::flake8 [GOOD] |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |52.5%| [PY] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/packaging/version.py.7ma7.yapyc3 |52.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/objcopy_2e1dd9c9bc385e6efd22b78136.o |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a >> test_insert_delete_duplicate_records.py::flake8 [GOOD] >> test_insertinto_selectfrom.py::flake8 [GOOD] >> test_tiering.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] |52.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/objcopy_ec616740770a3a76d53352e427.o |52.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |52.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/flake8 >> test_sql_streaming.py::flake8 [GOOD] |52.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/objcopy_93552b7e5c6234bd3731aaabaa.o |52.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/objcopy_323a17e94d8d570989807d19d3.o |52.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/benchmarks_init/objcopy_c96c333b4f7fc5cb2b98b27907.o |52.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/benchmarks_init/objcopy_287a0728f8b1ad204ac0396eb2.o |52.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/benchmarks_init/objcopy_de67ee476035f2cc7c8d34c996.o |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/tools/sql2yql/sql2yql.cpp |52.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/common/flake8 >> ydb_client.py::flake8 [GOOD] |52.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |52.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_import/flake8 >> test_yt_reading.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |52.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |52.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 >> test.py::flake8 [GOOD] |52.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_convert/flake8 >> gen-report.py::flake8 [GOOD] |52.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_790c6ea4aad5e761d21421b25d.o |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/liblibrary-cpp-lfalloc.a |52.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/objcopy_c114cbf6b820d92320c1e2c912.o |52.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_1c0f807c059fe226699115f242.o |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/objcopy_da2669c2228a88c83cd32d45da.o |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/mdb_mock/objcopy_6226cdbf0af925119e8a880e90.o |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/objcopy_589315062f5401a368910248f0.o >> conftest.py::flake8 [GOOD] >> test_auth_system_views.py::flake8 [GOOD] >> test_create_users.py::flake8 [GOOD] >> test_create_users_strict_acl_checks.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_dynamic_tenants.py::flake8 [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] >> test_storage_config.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] >> test_user_administration.py::flake8 [GOOD] >> test_users_groups_with_acl.py::flake8 [GOOD] |52.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/objcopy_ec94bbf9004678001f4c8195e3.o |52.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_c77713875cf17988efd8fc0fb3.o |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_c52ec5ba5ab0b788efaa5ed704.o |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_359d47616c1036f0865eb1e662.o |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |52.7%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/packaging/py3/packaging/_parser.py.7ma7.yapyc3 |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_16842d72ae0dac1856818f841e.o |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/objcopy_61613f0bd98876f149d8574891.o |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/5c5fdf614c3039a8dba94a4f38_raw.auxcpp |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/objcopy_c03bba49e13c66d080e15c8f35.o |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_903d4758faea71f1363e296b3f.o |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/objcopy_ce0222bab1634be9f9a52f715d.o |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.global.a |52.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/flake8 >> test_auditlog.py::flake8 [GOOD] |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |52.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_27c0687ceeb7ce4ff5e4cea90a.o |52.8%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/s3/c664ef6ca80e747b410e1da324_raw.auxcpp |52.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/flake8 >> zip_bomb.py::flake8 [GOOD] |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_ce073e3cc612363936bdd04210.o |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_d68e1e5b762e412afe6a534487.o |52.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_dc1e8788b8287c02880cfe2814.o |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |52.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_6cfba3dbee97ec121b2f346459.o |52.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/flake8 >> test_stream_query.py::flake8 [GOOD] |52.8%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/packaging/py3/packaging/markers.py.7ma7.yapyc3 |52.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_c43ce24509a50b033fa4050a33.o |52.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_d009f62008041e2f09cdbf7def.o |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/local_ydb/local_ydb |52.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/statistics_workload/objcopy_b4ebb94deb4cea673457b77fcc.o |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |52.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_49e9948af399bc60603a7d2db5.o |52.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_64bde13108f9284b2e9f0bbb7a.o |52.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/large_serializable/flake8 >> test_serializable.py::flake8 [GOOD] |52.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_52d3e6a0651990fc997ab40ba2.o |52.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_69005edd0f9166633ccd754c08.o |52.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_e5d897582dc0fbda7c578cb53f.o |52.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/lib/flake8 >> ydb_cluster.py::flake8 [GOOD] |52.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_951c70889c9404d1662da27090.o |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/tests-datasource-postgresql |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_e317764e105a7e9e48b67a7b7e.o |52.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/local_ydb/flake8 >> __main__.py::flake8 [GOOD] |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/arrow/python/libpy3src-arrow-python.a |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/objcopy_6c8bedcdc8efb835a928b278ce.o |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/connector/tests/join/yql-providers-generic-connector-tests-join |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.global.a |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_86ad37399122e504f3e6d8378d.o |52.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |52.8%| 
[LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/mysql/connector-tests-datasource-mysql |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |52.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a38b1580810a6e4b419da99dcf.o |52.9%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/packaging/py3/packaging/__init__.py.7ma7.yapyc3 |52.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] |52.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |52.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |52.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a5874452d3dbd6f6e49cd08be6.o |52.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |52.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_93891caf0b2b82d249b0a98fa8.o |52.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::flake8 [GOOD] |52.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_9ea5b1fb7a4f8e1b0b8d7cf345.o |52.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/olap_workload |52.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so |52.3%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/ydb_cli |52.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_181bdcd1743e9a1a78fafe4b60.o |52.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |52.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/large/flake8 >> test_workload_manager.py::flake8 [GOOD] |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part17/ydb-tests-fq-yt-kqp_yt_file-part17 |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |52.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/objcopy_fbab8021d30ec8df368308c49a.o |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part7/ydb-tests-fq-yt-kqp_yt_file-part7 |52.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/objcopy_0ee10940713087f217114ab4be.o |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/scramp/libpy3contrib-python-scramp.global.a |52.6%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/simple_queue |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/scenario/libpy3tests-utils-scenario.global.a |52.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/4399546af28cb40e5d74ea4a4b_raw.auxcpp |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/ydb-tests-sql |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/clickhouse-connect/libpy3contrib-python-clickhouse-connect.global.a >> conftest.py::flake8 [GOOD] >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_recursive_remove.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/lz4/py3/libpy3python-lz4-py3.a |52.4%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.global.a |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/cfg/bin/ydb_configure |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.global.a |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pg8000/libpy3contrib-python-pg8000.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asn1crypto/libpy3contrib-python-asn1crypto.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/test/library-yql-udfs-common-clickhouse-client-test |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.a |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/solomon_emulator_grpc/solomon_recipe_grpc |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/datasource-ms_sql_server |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/lz4/py3/libpy3python-lz4-py3.global.a |52.7%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/packaging/py3/packaging/metadata.py.7ma7.yapyc3 |52.8%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/packaging/py3/packaging/tags.py.7ma7.yapyc3 |52.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/e273c09e9944ed8d4db55cf519_raw.auxcpp |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |52.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/objcopy_04bfe236a98e0af88f14e75aff.o |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/zstandard/py3/libpy3python-zstandard-py3.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/clickhouse-connect/libpy3contrib-python-clickhouse-connect.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/run/libpy3tests-utils-run.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/common_test_cases/libpy3connector-tests-common_test_cases.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/clients/libpy3tests-utils-clients.global.a |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libpy3common-token_accessor-grpc.global.a |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/zstandard/py3/libpy3python-zstandard-py3.a |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |52.7%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/python/packaging/py3/packaging/_elffile.py.7ma7.yapyc3 |52.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/token_accessor_mock/objcopy_4f3652827ba5acef339141a388.o |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/oracle/tests-datasource-oracle |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.global.a |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests |52.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable |52.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_4b2e093abff756c97b675c0a31.o |52.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |52.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/docs/generator/generator |52.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yt/kqp_yt_file/part0/ydb-tests-fq-yt-kqp_yt_file-part0 |52.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_f0c8f68ad8d5be2aa410794898.o |52.7%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/olap_workload |52.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/tool/flake8 >> __main__.py::flake8 [GOOD] |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ydb/connector-tests-datasource-ydb |52.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_b9fd5c62781ec3b78d111a0ba7.o |52.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_7f9e816a97aaeee837ac316091.o |52.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_49bad8251d240ad7c49d384b91.o |52.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_a6e393b6d53f4c73feac80b55c.o |52.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_ae5b9f6e7a00f305f01a3dde87.o |53.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_89b3e69f7cdba68b4eefcae48c.o |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |53.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_5a4a401f33f46c70417a65f584.o |53.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/aae788a890ddcb1702c659c8aa_raw.auxcpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/flavours/libpy3tests-library-flavours.global.a |53.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_0c451aebc6dafbdf0d9da2ab02.o |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.global.a |53.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_bd84885c5c24478d181ba9e493.o |53.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_7eab954373d77ffb1fab95ca0d.o |53.0%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/library/ut/objcopy_6508d12aaafde6f0a60fe8fff3.o |53.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_cd9abca883cad9b25e20bf2f08.o |53.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_03f75cad4510fd9d018635026c.o |53.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/flake8 >> test_users_groups_with_acl.py::flake8 [GOOD] |53.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so >> conftest.py::flake8 [GOOD] >> s3_helpers.py::flake8 [GOOD] >> test_bindings_0.py::flake8 [GOOD] >> test_bindings_1.py::flake8 [GOOD] >> test_compressions.py::flake8 [GOOD] >> test_early_finish.py::flake8 [GOOD] >> test_explicit_partitioning_0.py::flake8 [GOOD] >> test_explicit_partitioning_1.py::flake8 [GOOD] >> test_format_setting.py::flake8 [GOOD] >> test_formats.py::flake8 [GOOD] >> test_inflight.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_push_down.py::flake8 [GOOD] >> test_s3_0.py::flake8 [GOOD] >> test_s3_1.py::flake8 [GOOD] >> test_size_limit.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_streaming_join.py::flake8 [GOOD] >> test_test_connection.py::flake8 [GOOD] >> test_validation.py::flake8 [GOOD] >> test_ydb_over_fq.py::flake8 [GOOD] >> test_yq_v2.py::flake8 [GOOD] |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/packaging/_structures.py.7ma7.yapyc3 |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/packaging/utils.py.7ma7.yapyc3 |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/packaging/requirements.py.7ma7.yapyc3 |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/packaging/licenses/_spdx.py.7ma7.yapyc3 |53.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_388aef0b6ac03d4f661ae7a30e.o |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_e31620202d3ba8df14ff2a18e1.o |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_f8b2cbafb1fed0e25bf9683c2d.o |53.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |53.4%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/rescompressor/rescompressor |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/packaging/_musllinux.py.7ma7.yapyc3 |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/objcopy_8d2ea3c78a255bb4c87c2fc54a.o |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/tld.inc |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/objcopy_8a480df96cc6cd49399cfaea66.o |53.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/8ba4bc7bbd068d496fd8d38c20_raw.auxcpp |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/objcopy_c6dc9ea6dc9d2c6577817a5fb6.o |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/objcopy_0ed2be5b1f8bbcf21c01d97861.o |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_48884f6b745ced4d3e78997cb1.o |53.4%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/hive/objcopy_aebf7c73fcaf6a54715cc177c8.o |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/join/objcopy_eff72a5efd2fa66b3363e16886.o |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/join/objcopy_7bb4c5cc9026f2b8034570c51c.o |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_5333c1912ecbac0f64ff97551f.o |53.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/join/objcopy_fa785ada0d264f44db0c3df820.o |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |53.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/mysql/objcopy_71c5c57afe9530748c30b055f8.o |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a |53.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/mysql/objcopy_5a23f199ba2ad5114d97d1e863.o |53.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/mysql/objcopy_41a67a8b373ce2db88d0a50b4b.o |53.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/mysql/364af2d5bcc4d0c488c09257c5_raw.auxcpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/join/2486a40dc27b3deeed2a20d6d7_raw.auxcpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/locks_ut.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |53.6%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/fix_elf/fix_elf |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a |53.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |53.7%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/py3cc/py3cc |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |53.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/common/liblibs-brotli-common.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/dec/liblibs-brotli-dec.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a |53.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part17/objcopy_8178bd9215c9d2657ba1347c56.o |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/enc/liblibs-brotli-enc.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |53.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_a65a4fae8912a32233240d3c51.o |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |53.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_1326afc143d720f2af434cd836.o |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |53.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_2f0e0ac8198858b9ec9901778e.o |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |53.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/objcopy_aa67c8c9819a6e460379e522ab.o |53.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/objcopy_9314464e3560b2511ac931acd9.o |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |53.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_994fcbd53c4e2174c302bdb5ab.o |53.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_7c328c2741f9dd7697a2e0e8b1.o |53.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/objcopy_aab724be52dad3663d415db204.o |53.8%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/serializable/objcopy_445797246443360525d31550d1.o |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |53.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_3ea8aa67e7c24c4f0e3b0406b9.o |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |53.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ydb/0e6ff9458826896f7a7b1b2eaf_raw.auxcpp |53.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ydb/objcopy_1e0fb16076b5a3105119e574a8.o |53.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/objcopy_ac8dbe7f54a2cb7efb6636f75f.o |53.9%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/contrib/python/packaging/py3/objcopy_e5dd2f6d7a954861412da5f236.o |53.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ydb/objcopy_cf816152ca64b2ca8294df441b.o |53.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part0/objcopy_c9fea661be8c334f799cd67e82.o |53.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part0/objcopy_fbfd6ed64fc3217fc7ef50a203.o |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/flat_ut.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |54.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |54.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/objcopy_8db6616d40f8020d0632222fe3.o |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |54.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_51b071d7746089933668451b33.o |54.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_df0cb3f315162a3110ee243ecd.o |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |54.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_e0331f455507fe5ac3b71d0537.o |54.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_c7c229be41e9b028572ad1aab3.o |54.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/edaf602b2011baa1519a223d63_raw.auxcpp |54.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_1007df29dec27b0b7a1587d49f.o |54.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_44fac4fe441507735704a000ad.o |54.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/objcopy_c5b31a2f5aa73f98bc83284c22.o |54.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part0/objcopy_fcb137d2b763154d727730cad3.o |54.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/objcopy_24cfda7d41447be7f781827fb8.o |54.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/oracle/objcopy_158148a8bf02e291fb1e4cb617.o |54.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/objcopy_73ddf87b96fcbfc4f715436dc4.o |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |54.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_1dba5118ef0a485f3bf803be50.o |54.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/objcopy_b0df339b5cd42be3b946278515.o |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |54.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_6e536fb2c379a4ebe79c499de8.o |54.2%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/yt/kqp_yt_file/part17/objcopy_f5a47631b1547ad5bd38d78201.o |54.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_c068ee86eb127df13256bfbe45.o |54.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/7fc0a944ff3f4c9130511a5804_raw.auxcpp |54.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/objcopy_9ec58f723c034c871861783d19.o |54.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/objcopy_8f7d2de1c8d713e4feeacffe30.o |54.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_d52256d4fa9895f38df6030445.o |54.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_8ac5034640eee44b1cd5fa5253.o |54.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_422ca1effff14e5a08952658d0.o |54.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_5f161468ff5322b803d4d0dc79.o |54.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator_grpc/objcopy_80920e26d93a094bd0bafe6208.o |54.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/objcopy_508ca12bde6da4b8c8b0fdd382.o |54.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/objcopy_940b9a794cb8fbc6ebdf926276.o |54.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part7/objcopy_dc680a1e3364f87424f30be8c7.o |54.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_965640ca94893d27c182c611e2.o |54.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_f738234258cd034cd5383f92ad.o |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |54.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/oracle/objcopy_d8c1983c83374ff3531b03c654.o |54.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_927a1f7611cf94fb1cd21ef8cf.o |54.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_5294a064c14cf5a49516321590.o |54.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part7/objcopy_30225cba10a905b2295e2399b3.o |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/udfs/common/url_base/url_base.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/oracle/1e2480c2b04be34c00bb78e34e_raw.auxcpp |54.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part17/objcopy_51633d73145b900ec8fab6f2c9.o |54.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ydb/objcopy_c93b2f849b5f6ee8532dd4d6fd.o |54.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_14c03c6aecffbe39cb01ddf2ed.o |54.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/objcopy_9be2dadc45d1a9fdc157172661.o |54.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_40779f0570229cef213050a4fa.o |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |54.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/objcopy_8fca143a218b930f297b779e3a.o |54.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/objcopy_9de271b22d7bcc64ef77cc3cde.o |54.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_c98e5b95c64b8486a12f10d408.o |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libpy3solomon-solomon_accessor-grpc.global.a |54.5%| [PY] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_938861be99a6cedecb22904193.o |54.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/oracle/objcopy_9f24a29ba641072592b3e37403.o |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |54.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_a0543c2dc30365e9b2ad3d0ca6.o |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |54.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_2d296dfaf373f7f15e6312517a.o |54.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/objcopy_bab46dc0e0bb01200e952d765c.o |54.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_b06d27009e49b9ba3df883a226.o |54.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_b91160bcee04ad1f57e80af064.o |54.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_0aefef587c181350d3a25f70e0.o |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |54.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_08f7acdb6eb761b28bf6990862.o |54.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/clickhouse/client/test/objcopy_00c9f91e5c0f545a1859555cf7.o |54.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/objcopy_b701dac104d6ebd83e6489821f.o |54.8%| [CC] {default-linux-x86_64, relwithdebinfo, pic} $(S)/library/cpp/tld/tld.cpp |54.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_83efacabe56767ae4f106a6d27.o |54.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part7/objcopy_b758c885f9640dabee25c0b272.o |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |54.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/tld/tld.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |54.3%| [PY] {BAZEL_UPLOAD} $(B)/contrib/python/packaging/py3/objcopy_e5dd2f6d7a954861412da5f236.o |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |53.1%| [CC] {BAZEL_UPLOAD} $(S)/library/cpp/tld/tld.cpp |53.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |53.2%| [CC] {BAZEL_UPLOAD} $(S)/library/cpp/tld/tld.cpp |53.2%| COMPACTING CACHE 507.2MiB |53.2%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/contrib/python/packaging/py3/packaging/_manylinux.py.7ma7.yapyc3 |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |53.2%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/contrib/python/packaging/py3/packaging/__init__.py.7ma7.yapyc3 |53.1%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/contrib/python/packaging/py3/packaging/_elffile.py.7ma7.yapyc3 |53.1%| [PY] {BAZEL_UPLOAD} $(B)/contrib/python/packaging/py3/packaging/_manylinux.py.7ma7.yapyc3 |53.1%| [TS] {RESULT} 
ydb/tests/functional/serializable/flake8 |53.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |53.2%| [TS] {RESULT} ydb/tests/tools/s3_recipe/flake8 |53.2%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/contrib/python/packaging/py3/packaging/_tokenizer.py.7ma7.yapyc3 |53.2%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/contrib/python/packaging/py3/packaging/tags.py.7ma7.yapyc3 |53.2%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/contrib/python/packaging/py3/packaging/_parser.py.7ma7.yapyc3 |53.2%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/contrib/python/packaging/py3/packaging/licenses/__init__.py.7ma7.yapyc3 |53.2%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/contrib/python/packaging/py3/packaging/markers.py.7ma7.yapyc3 |53.2%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/contrib/python/packaging/py3/packaging/specifiers.py.7ma7.yapyc3 |53.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 |53.3%| [TS] {RESULT} ydb/tests/functional/ttl/flake8 |53.3%| [TS] {RESULT} ydb/tests/fq/generic/streaming/black |53.3%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 |53.3%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/contrib/python/packaging/py3/packaging/metadata.py.7ma7.yapyc3 |53.3%| [TS] {RESULT} ydb/library/benchmarks/runner/flake8 |53.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 |53.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 |53.3%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8 |53.3%| [TS] {RESULT} ydb/tests/example/flake8 |53.3%| [TS] {RESULT} ydb/tests/stress/oltp_workload/flake8 |53.4%| [PY] {BAZEL_UPLOAD} $(B)/contrib/python/packaging/py3/packaging/__init__.py.7ma7.yapyc3 |53.4%| [PY] {BAZEL_UPLOAD} $(B)/contrib/python/packaging/py3/packaging/_elffile.py.7ma7.yapyc3 |53.4%| [PY] {BAZEL_UPLOAD} $(B)/contrib/python/packaging/py3/packaging/_tokenizer.py.7ma7.yapyc3 |53.4%| [PY] {BAZEL_UPLOAD} $(B)/contrib/python/packaging/py3/packaging/tags.py.7ma7.yapyc3 |53.4%| [PY] {BAZEL_UPLOAD} $(B)/contrib/python/packaging/py3/packaging/licenses/__init__.py.7ma7.yapyc3 |53.4%| [PY] {BAZEL_UPLOAD} $(B)/contrib/python/packaging/py3/packaging/_parser.py.7ma7.yapyc3 |53.4%| [PY] {BAZEL_UPLOAD} $(B)/contrib/python/packaging/py3/packaging/markers.py.7ma7.yapyc3 |53.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 |53.4%| [PY] {BAZEL_UPLOAD} $(B)/contrib/python/packaging/py3/packaging/metadata.py.7ma7.yapyc3 |53.4%| [PY] {BAZEL_UPLOAD} $(B)/contrib/python/packaging/py3/packaging/specifiers.py.7ma7.yapyc3 |53.4%| [TS] {RESULT} ydb/tests/fq/generic/streaming/flake8 |53.5%| [TS] {RESULT} ydb/public/tools/ydb_recipe/flake8 |53.4%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8 |53.4%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8 |53.4%| [TS] {RESULT} ydb/tests/tools/token_accessor_mock/flake8 |53.4%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/contrib/python/packaging/py3/objcopy_50cfa15c298013619cf03bf14e.o |53.5%| [TS] {RESULT} ydb/tests/olap/docs/generator/flake8 |53.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 |53.5%| [TS] {RESULT} ydb/tests/tools/mdb_mock/black |53.5%| [TS] {RESULT} ydb/library/benchmarks/runner/runner/import_test |53.5%| [TS] {RESULT} ydb/tests/stress/olap_workload/flake8 |53.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 |53.5%| [PY] {default-linux-x86_64, relwithdebinfo} 
$(B)/contrib/python/packaging/py3/objcopy_6150f31900b6d744b36b62e2ca.o |53.5%| [TS] {RESULT} ydb/library/benchmarks/runner/run_tests/import_test |53.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 |53.5%| [PY] {BAZEL_UPLOAD} $(B)/contrib/python/packaging/py3/objcopy_50cfa15c298013619cf03bf14e.o |53.6%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/contrib/python/packaging/py3/objcopy_6d9ef78679643f8cbf3879ecf6.o |53.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |53.6%| [PY] {BAZEL_UPLOAD} $(B)/contrib/python/packaging/py3/objcopy_6150f31900b6d744b36b62e2ca.o |53.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 |53.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 |53.6%| [TS] {RESULT} ydb/tests/fq/solomon/flake8 |53.6%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 |53.6%| [TS] {RESULT} ydb/tests/datashard/split_merge/flake8 |53.6%| [TS] {RESULT} ydb/library/benchmarks/template/ut/flake8 |53.6%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 |53.6%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 |53.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 |53.7%| [TS] {RESULT} ydb/tests/functional/limits/flake8 |53.7%| [TS] {RESULT} ydb/tests/functional/audit/flake8 |53.7%| [TS] {RESULT} ydb/tests/datashard/async_replication/flake8 |53.7%| [PY] {BAZEL_UPLOAD} $(B)/contrib/python/packaging/py3/objcopy_6d9ef78679643f8cbf3879ecf6.o |53.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |53.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |53.7%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8 |53.7%| [TS] {RESULT} ydb/tests/functional/serverless/flake8 |53.7%| [TS] {RESULT} ydb/tests/stress/simple_queue/flake8 |53.7%| [TS] {RESULT} ydb/tests/stress/statistics_workload/flake8 |53.7%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 |53.7%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8 |53.7%| [TS] {RESULT} ydb/tests/olap/s3_import/flake8 |53.8%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8 |53.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 |53.8%| [TS] {RESULT} ydb/tests/functional/wardens/flake8 |53.8%| [TS] {RESULT} ydb/tests/stress/kv/tests/flake8 |53.8%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8 |53.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |53.8%| [TS] {RESULT} ydb/tests/functional/compatibility/flake8 |53.8%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8 |53.8%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 |53.8%| [TS] {RESULT} ydb/tests/olap/scenario/flake8 |53.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 |53.8%| [TS] {RESULT} ydb/tests/functional/tpc/large/flake8 |53.9%| [TS] {RESULT} ydb/tests/olap/load/flake8 |53.9%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8 |53.9%| [TS] {RESULT} ydb/tests/functional/benchmarks_init/flake8 |53.9%| [TS] {RESULT} ydb/tests/tools/mdb_mock/flake8 |53.9%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8 |53.9%| [TS] {RESULT} ydb/tests/olap/data_quotas/flake8 |53.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 |53.9%| [TS] {RESULT} ydb/tests/olap/common/flake8 |53.9%| [TS] {RESULT} ydb/tests/datashard/s3/flake8 |53.9%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/flake8 
|53.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |54.0%| [TS] {RESULT} ydb/tests/functional/canonical/flake8 |54.0%| [TS] {RESULT} ydb/tests/fq/plans/flake8 |54.0%| [TS] {RESULT} ydb/tests/functional/tpc/medium/flake8 |54.0%| [TS] {RESULT} ydb/library/yaml_config/ut_transform/flake8 |54.0%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8 |54.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 |54.0%| [TS] {RESULT} ydb/public/tools/local_ydb/flake8 |54.0%| [TS] {RESULT} ydb/library/benchmarks/runner/result_convert/import_test |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |54.0%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8 |54.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 |54.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 |54.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 |54.1%| [TS] {RESULT} ydb/tests/datashard/copy_table/flake8 |54.1%| [TS] {RESULT} ydb/tests/functional/minidumps/flake8 |54.1%| [TS] {RESULT} ydb/tests/sql/lib/flake8 |54.1%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8 |54.1%| [TS] {RESULT} ydb/tests/fq/generic/analytics/flake8 |54.1%| [TS] {RESULT} ydb/core/viewer/tests/flake8 |54.1%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 |54.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 |54.1%| [TS] {RESULT} ydb/tests/fq/s3/flake8 |54.1%| [TS] {RESULT} ydb/tests/fq/restarts/flake8 |54.1%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/flake8 |54.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 |54.2%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/flake8 |54.2%| [TS] {RESULT} ydb/tests/datashard/dml/flake8 |54.2%| [TS] {RESULT} ydb/library/benchmarks/report/ut/flake8 |54.2%| [TS] {RESULT} ydb/tests/functional/config/flake8 |54.2%| [TS] {RESULT} ydb/tests/functional/api/flake8 |54.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 |54.2%| [TS] {RESULT} ydb/tests/fq/http_api/flake8 |54.2%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 |54.2%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/flake8 |54.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 |54.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 |54.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 |54.3%| [TS] {RESULT} ydb/tests/olap/column_family/compression/flake8 |54.3%| [TS] {RESULT} ydb/tests/stress/log/tests/flake8 |54.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 |54.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 |54.3%| [TS] {RESULT} ydb/tests/supp/flake8 |54.3%| [TS] {RESULT} ydb/tests/functional/clickbench/flake8 |54.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 |54.3%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/replay/flake8 |54.3%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8 |54.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 |54.3%| [TS] {RESULT} ydb/library/yql/tools/solomon_emulator/recipe/flake8 |54.4%| [TS] {RESULT} ydb/tests/fq/generic/analytics/black |54.4%| [TS] {RESULT} ydb/tests/fq/streaming_optimize/flake8 |54.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 |54.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 |54.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 |54.4%| [TS] {RESULT} 
ydb/tests/fq/yt/kqp_yt_file/part7/flake8 |54.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/flake8 |54.4%| [TS] {RESULT} ydb/tests/functional/large_serializable/flake8 |54.4%| [TS] {RESULT} ydb/tests/tools/nemesis/driver/flake8 |54.4%| [TS] {RESULT} ydb/tests/functional/cms/flake8 |54.4%| [TS] {RESULT} ydb/library/benchmarks/runner/runner/flake8 |54.4%| [TS] {RESULT} ydb/tests/stability/tool/flake8 |54.5%| [TS] {RESULT} ydb/tests/functional/restarts/flake8 |54.5%| [TS] {RESULT} ydb/tests/datashard/dump_restore/flake8 |54.5%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 |54.5%| [TS] {RESULT} ydb/tests/sql/flake8 |54.5%| [TS] {RESULT} ydb/tests/sql/large/flake8 |54.5%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/flake8 |54.5%| [TS] {RESULT} ydb/tests/fq/yds/flake8 |54.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 |54.5%| [TS] {RESULT} ydb/tests/stability/ydb/flake8 |54.5%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8 |54.5%| [TS] {RESULT} ydb/library/yql/tests/sql/solomon/py2_flake8 |54.5%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 |54.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 |54.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 |54.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 |54.6%| [TS] {RESULT} ydb/tests/fq/common/flake8 |54.6%| [TS] {RESULT} ydb/tests/functional/hive/flake8 |54.6%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/flake8 |54.6%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 |54.6%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 |54.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 |54.6%| [TS] {RESULT} ydb/tests/functional/rename/flake8 |54.6%| [TS] {RESULT} ydb/tests/olap/oom/flake8 |54.7%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 |54.7%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8 |54.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 |54.7%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8 |54.7%| [TS] {RESULT} ydb/library/benchmarks/runner/run_tests/flake8 |54.7%| [TS] {RESULT} ydb/tools/cfg/bin/flake8 |54.7%| [TS] {RESULT} ydb/tests/functional/encryption/flake8 |54.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 |54.7%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8 |54.7%| [TS] {RESULT} ydb/library/benchmarks/runner/result_compare/flake8 |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |54.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 |54.7%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8 |54.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 |54.8%| [TS] {RESULT} ydb/library/benchmarks/runner/result_compare/import_test |54.8%| [TS] {RESULT} ydb/tests/functional/tenants/flake8 |54.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 |54.8%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/join/flake8 |54.8%| [TS] {RESULT} ydb/tests/olap/flake8 |54.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 |54.8%| [TS] {RESULT} ydb/tests/stress/mixedpy/flake8 |54.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 |54.9%| [TS] {RESULT} ydb/tests/library/ut/flake8 |54.9%| [TS] {RESULT} ydb/tests/functional/suite_tests/flake8 |55.0%| [TS] {RESULT} ydb/library/benchmarks/runner/result_convert/flake8 |55.0%| [TS] {RESULT} 
ydb/tests/stress/olap_workload/tests/flake8 |55.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 |55.1%| [TS] {RESULT} ydb/tests/olap/lib/flake8 |55.3%| [TS] {RESULT} ydb/library/yql/tools/solomon_emulator_grpc/flake8 |55.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 |55.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 |55.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 |55.4%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8 |58.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |61.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |61.7%| [AR] {RESULT} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |62.2%| [AR] {BAZEL_UPLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |72.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tools/sql2yql/sql2yql |72.7%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |72.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |72.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |73.0%| [AR] {RESULT} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |73.1%| [AR] {BAZEL_UPLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |73.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/supp/ydb_supp |73.1%| [LD] {RESULT} $(B)/ydb/tests/supp/ydb_supp |73.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/yt/yt/client/libyt-yt-client.a |73.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |73.1%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |73.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/supp/ydb_supp |73.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |73.0%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |73.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |73.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |73.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |73.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init |73.1%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |73.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydb/ydb |73.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |73.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |73.1%| [LD] {RESULT} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init |73.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests |73.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |73.1%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |73.1%| [LD] {RESULT} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |73.1%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |73.1%| [LD] {RESULT} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests |73.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |73.1%| [LD] 
{RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |73.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |73.2%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |73.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |73.3%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/ydb_cli |73.3%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/ydb_cli >> test_generator.py::TestTpchGenerator::test_s1_state_and_parts >> test_generator.py::TestTpchGenerator::test_s1 >> test_generator.py::TestTpcdsGenerator::test_s1_state >> test_generator.py::TestTpchGenerator::test_s1_state >> test_init.py::TestTpchInit::test_s1_row [GOOD] >> ydb_supp::import_test [GOOD] |73.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize |73.3%| [LD] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize >> test_generator.py::TestTpcdsGenerator::test_s1 >> test_generator.py::TestTpcdsGenerator::test_s1_parts >> test_init.py::TestTpcdsInit::test_s1_column_decimal_ydb >> test_init.py::TestClickbenchInit::test_s1_s3 >> test_generator.py::TestTpchGenerator::test_s1_parts >> test_init.py::TestTpcdsInit::test_s1_column >> test_init.py::TestTpchInit::test_s1_s3 [GOOD] >> test_init.py::TestTpchInit::test_s1_column_decimal [GOOD] >> test_init.py::TestClickbenchInit::test_s1_column [GOOD] >> test_init.py::TestTpchInit::test_s1_column_decimal_ydb [GOOD] >> test_init.py::TestClickbenchInit::test_s1_row [GOOD] >> test_init.py::TestTpchInit::test_s1_column [GOOD] >> test_init.py::TestTpcdsInit::test_s1_s3 [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column_decimal_ydb [GOOD] >> test_init.py::TestTpcdsInit::test_s1_row [GOOD] >> test_init.py::TestClickbenchInit::test_s1_s3 [GOOD] >> test_init.py::TestTpcdsInit::test_s100_column [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column_decimal [GOOD] >> test_init.py::TestTpchInit::test_s100_column |73.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s100_column [GOOD] |73.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/supp/import_test >> ydb_supp::import_test [GOOD] |73.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test |73.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_row [GOOD] |73.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_column_decimal [GOOD] |73.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_column_decimal_ydb [GOOD] |73.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_column [GOOD] |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s100_column [GOOD] |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_s3 [GOOD] |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s1_column_decimal [GOOD] |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestClickbenchInit::test_s1_row [GOOD] |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s1_row [GOOD] |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s100_column [GOOD] |73.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |73.4%| [TS] {RESULT} ydb/tests/supp/import_test |73.4%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |73.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |73.4%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |73.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |73.4%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig >> test_generator.py::TestTpcdsGenerator::test_s1_state_and_parts >> ydb-tests-fq-streaming_optimize::import_test [GOOD] |73.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/import_test >> ydb-tests-fq-streaming_optimize::import_test [GOOD] |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |73.5%| [TS] {RESULT} ydb/tests/fq/streaming_optimize/import_test |73.5%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |73.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize ------- [LD] {default-linux-x86_64, relwithdebinfo} $(B)/yql/tools/yqlrun/yqlrun ld.lld: warning: version script assignment of 'global' to symbol '__after_morecore_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'daylight' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__malloc_initialize_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_short_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timezone' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__libc_start_main' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensAfter' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensBefore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol 'AnnotateIgnoreReadsBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'abort' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace_symbols' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bind' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'canonicalize_file_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capget' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'cfree' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_getres' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'closedir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'confstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'connect' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctermid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__cxa_atexit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'drand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 
'dup2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_ctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_hostton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_line' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntohost' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fdopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fflush' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'flistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fmemopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopencookie' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexp' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol 'frexpf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexpl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fwrite' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getaddrinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'get_current_dir_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getcwd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getifaddrs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getline' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getnameinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpass' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpeername' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getresgid' 
failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getresuid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockopt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gettimeofday' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'iconv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_indextoname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_nametoindex' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_ntop' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_pton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'initgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ioctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsscanf' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol 'kill' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgamma' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgamma_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'llistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'longjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lrand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsnrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbstowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memcmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmem' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memrchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mincore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mktime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mmap' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mmap64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 
'modff' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modfl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munmap' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'nanosleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin_1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_newchunk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'on_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'opendir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_memstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_wmemstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__overflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'poll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ppoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'process_vm_readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'process_vm_writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getaffinity_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getdetachstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getguardsize' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getinheritsched' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getschedpolicy' failed: symbol 
not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getscope' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getstack' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getstacksize' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrierattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getclock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_broadcast' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_signal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_detach' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_join' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_kill' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprioceiling' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprotocol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getrobust' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getrobust_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_gettype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_timedlock' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol 'pthread_mutex_trylock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_once' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getkind_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_rdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_timedrdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_timedwrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_tryrdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_trywrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_wrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcancelstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcanceltype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setname_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_trylock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ptrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'puts' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pvalloc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'raise' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'random_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rand_r' failed: symbol not defined ld.lld: 
warning: version script assignment of 'global' to symbol 'read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'realpath' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvfrom' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__res_iclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rmdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getaffinity' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_getvalue' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_post' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_trywait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'send' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendto' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_setjmp' failed: symbol not defined ld.lld: warning: version script assignment of 
'global' to symbol 'setlocale' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'shmctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigaction' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigemptyset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigfillset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'siglongjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'signal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'signalfd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigpending' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigprocmask' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigsetjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__sigsetjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigsuspend' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigtimedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigwaitinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincos' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincosf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincosl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socket' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socketpair' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statvfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statvfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcasecmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcasestr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strchrnul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcmp' failed: 
symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcspn' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strdup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strerror' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncasecmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strnlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strpbrk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strptime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strrchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strspn' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoimax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoumax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sysinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tcgetattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tempnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'textdomain' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'time' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'times' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__tls_get_addr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpfile' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpfile64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tsearch' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__uflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__underflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'unlink' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol 'usleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vasprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitpid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcrtomb' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsnrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wordexp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__woverflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wuflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wunderflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bool' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bytes' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_double' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_enum' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_float' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int32_t' failed: symbol 
not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrmem_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrstdio_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_string' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xpg_strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace_symbols' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bcopy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'canonicalize_file_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capget' failed: symbol not defined ld.lld: warning: version script assignment of 
'global' to symbol 'capset' failed: symbol not defined
[ld.lld emitted the same warning, "version script assignment of 'global' to symbol '<name>' failed: symbol not defined", for several hundred further names, covering undefined libc stdio/stdlib, pthread, math, locale, network, wide-character and XDR symbols from 'cfree' through '__xpg_strerror_r', '__xstat' and '__xstat64'.]
|73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/tools/yqlrun/yqlrun
|73.5%| [LD] {RESULT} $(B)/yql/tools/yqlrun/yqlrun
|73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants
|73.5%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants
|73.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits
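For context on the block above: ld.lld prints "version script assignment of 'global' to symbol '<name>' failed: symbol not defined" when the version script passed via --version-script lists a symbol in its global: section that the binary being linked does not actually define, which commonly happens when a deliberately over-broad script enumerates libc entry points. Here it is only a warning, so the link still proceeds. The sketch below reproduces the effect on a tiny scale; the file names demo.c and demo.map, the three-symbol list, and the clang/lld invocation in the comments are illustrative assumptions, not taken from this build.

/* demo.c - minimal sketch of how an over-broad version script triggers
 * the "symbol not defined" warnings seen above.
 *
 * Hypothetical demo.map:
 *     { global: main; getenv; capset; local: *; };
 *
 * Hypothetical link step (assumes clang with lld is available):
 *     clang demo.c -fuse-ld=lld -Wl,--version-script,demo.map -o demo
 *
 * 'main' is defined in this object, so its version assignment succeeds.
 * 'getenv' is only imported from libc and 'capset' is not referenced at
 * all, so neither is defined in the output, and ld.lld warns once per
 * name, exactly as in the log above.
 */
#include <stdlib.h>

int main(void) {
    /* Keep one ordinary libc call so the example resembles a real program. */
    return getenv("HOME") ? 0 : 1;
}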
|73.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless
|73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe
|73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane
|73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests
>> ydb-tests-functional-benchmarks_init::import_test [GOOD]
>> ydb-tests-fq-multi_plane::import_test [GOOD]
|73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init
|73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode
>> ydb-tests-functional-serverless::import_test [GOOD]
|73.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/import_test >> ydb-tests-fq-multi_plane::import_test [GOOD]
|73.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/import_test >> ydb-tests-functional-benchmarks_init::import_test [GOOD]
|73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large
|73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge
|73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl
|73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig
|73.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive
|73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut
|73.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/import_test >> ydb-tests-functional-serverless::import_test [GOOD]
>> ydb-tests-functional-sqs-multinode::import_test [GOOD]
|73.7%| [TS] {RESULT} ydb/tests/fq/multi_plane/import_test
|73.7%| [TS] {RESULT} ydb/tests/functional/benchmarks_init/import_test
|73.7%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive
|73.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom
|73.7%| [TS] {RESULT} ydb/tests/functional/serverless/import_test
>> ydb_recipe::import_test [GOOD]
|73.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium
|73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml
|73.7%| [LD] {RESULT} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml
|73.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants
|73.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/ydb_recipe/import_test >> ydb_recipe::import_test [GOOD]
|73.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/import_test >> ydb-tests-functional-sqs-multinode::import_test [GOOD]
|73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3
|73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3
|73.8%| [TS] {RESULT} ydb/public/tools/ydb_recipe/import_test
|73.8%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/import_test
|73.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3
|73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas
|73.8%| [LD] {RESULT} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas
>> ydb-tests-functional-limits::import_test [GOOD]
|73.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive
|73.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/import_test >> ydb-tests-functional-limits::import_test [GOOD]
>> ydb-tests-functional-sqs-large::import_test [GOOD]
>> ydb-tests-olap-oom::import_test [GOOD]
>> TExportToS3WithRebootsTests::ShouldSucceedOnSingleTable [GOOD]
>> TExportToS3WithRebootsTests::ForgetShouldSucceedOnViewsAndTables
>> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleTable [GOOD]
>> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTableWithChangefeed
>> TExportToS3WithRebootsTests::ShouldSucceedOnManyTables
|73.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml
|73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/config/ydb-tests-functional-config
|73.8%| [TS] {RESULT} ydb/tests/functional/limits/import_test
|73.9%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config
|73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/import_test >> ydb-tests-functional-sqs-large::import_test [GOOD]
>> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleShardTable
>> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleView
>> TExportToS3WithRebootsTests::ShouldSucceedAutoDropping
>> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTable
>> TExportToS3WithRebootsTests::ShouldSucceedOnMultiShardTable
>> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTableWithChangefeed
>> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTablesPermissions
|73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/oom/import_test >> ydb-tests-olap-oom::import_test [GOOD]
|73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms
|73.9%| [TS] {RESULT} ydb/tests/functional/sqs/large/import_test
|73.9%| [TS] {RESULT} ydb/tests/olap/oom/import_test
>> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTable
>> TExportToS3WithRebootsTests::CancelShouldSucceedOnViewsAndTables
>> TExportToS3WithRebootsTests::ShouldSucceedOnSingleView
>> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleTable [GOOD]
>> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleView
|73.9%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms
>> TExportToS3WithRebootsTests::ShouldDisableAutoDropping
>> TExportToS3WithRebootsTests::CancelShouldSucceedOnManyTables
>> ydb-tests-functional-suite_tests::import_test [GOOD]
>> ydb-tests-tools-nemesis-ut::import_test [GOOD]
>> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTables
>> ydb-tests-functional-ttl::import_test [GOOD]
|73.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleTable [GOOD]
>> ydb-tests-functional-autoconfig::import_test [GOOD]
|73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/import_test >> ydb-tests-functional-suite_tests::import_test [GOOD]
|73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe
|73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/import_test >> ydb-tests-tools-nemesis-ut::import_test [GOOD]
|73.9%| [TS] {RESULT} ydb/tests/functional/suite_tests/import_test
|73.9%| [LD] {RESULT} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe
|73.9%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/import_test
|74.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/import_test >> ydb-tests-functional-autoconfig::import_test [GOOD]
|74.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/import_test >> ydb-tests-functional-ttl::import_test [GOOD]
|74.0%| [TS] {RESULT} ydb/tests/functional/autoconfig/import_test
|74.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas
|74.0%| [TS] {RESULT} ydb/tests/functional/ttl/import_test
|74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut
|74.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut
|74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut
|74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/dqrun/dqrun
|74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage
|74.0%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun
|74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun
|74.1%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage
|74.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/config/ydb-tests-functional-config
|74.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe
|74.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut
|74.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut
|74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut
|74.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli
|74.1%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli
|74.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms
|74.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table
|74.1%| [LD] {RESULT} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table
|74.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3
|74.1%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3
|74.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore
|74.2%| [LD] {RESULT} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore
|74.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage
|74.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering
|74.2%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering
|74.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard
|74.2%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard
>> solomon_recipe::import_test [GOOD]
|74.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup
|74.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup
|74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup
|74.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli
|74.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/ut/ydb-core-client-ut
|74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut
|74.3%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut
>> ydb-tests-functional-tpc-medium::import_test [GOOD]
|74.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3
|74.3%| [LD] {RESULT} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3
|74.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tools/solomon_emulator/recipe/import_test >> solomon_recipe::import_test [GOOD]
>> ydb-tests-datashard-split_merge::import_test [GOOD]
|74.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/import_test >> ydb-tests-functional-tpc-medium::import_test [GOOD]
|74.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export
|74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export
|74.3%| [TS] {RESULT} ydb/library/yql/tools/solomon_emulator/recipe/import_test
|74.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/import_test >> ydb-tests-datashard-split_merge::import_test [GOOD]
|74.3%| [TS] {RESULT} ydb/tests/functional/tpc/medium/import_test
|74.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export
|74.3%| [TS] {RESULT} ydb/tests/datashard/split_merge/import_test
|74.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution
|74.4%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution
|74.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/fqrun/fqrun
|74.4%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun
|74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun
|74.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts
|74.4%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts
|74.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename
|74.4%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename
|74.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas
|74.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas
>> TBackupTests::ShouldSucceedOnSingleShardTable[Raw]
>> TBackupTests::BackupUuidColumn[Zstd]
>> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch
|74.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq
|74.5%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq
>> TBackupTests::ShouldSucceedOnLargeData[Zstd]
>> TBackupTests::ShouldSucceedOnLargeData[Raw]
|74.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/clickbench/ydb-tests-functional-clickbench
|74.5%| [LD] {RESULT} $(B)/ydb/tests/functional/clickbench/ydb-tests-functional-clickbench
|74.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage
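The run so far interleaves link and upload records with per-test verdict lines such as ">> ydb-tests-functional-ttl::import_test [GOOD]". When scanning a saved capture like this offline it can help to tally how often a given marker appears; the helper below is a hypothetical convenience, not part of the ya tooling, and it simply counts lines containing whatever literal markers you pass it (for example the "[GOOD]" verdict visible throughout this section).

/* tally_markers.c - hypothetical helper for summarising a saved build log.
 * Counts how many lines of the log contain each marker string given on
 * the command line.
 *
 * Build and run (assumption):
 *     cc tally_markers.c -o tally_markers
 *     ./tally_markers ya_log.txt "[GOOD]"
 */
#include <stdio.h>
#include <string.h>

int main(int argc, char **argv) {
    if (argc < 3) {
        fprintf(stderr, "usage: %s <log-file> <marker> [marker ...]\n", argv[0]);
        return 2;
    }
    FILE *f = fopen(argv[1], "r");
    if (!f) {
        perror("fopen");
        return 2;
    }
    long counts[64] = {0};
    int nmarkers = argc - 2;
    if (nmarkers > 64)
        nmarkers = 64;                 /* cap to the fixed counters array */
    char line[16384];                  /* records in this log can be long */
    while (fgets(line, sizeof line, f)) {
        for (int i = 0; i < nmarkers; i++) {
            if (strstr(line, argv[i + 2]))
                counts[i]++;
        }
    }
    fclose(f);
    for (int i = 0; i < nmarkers; i++)
        printf("%-24s %ld\n", argv[i + 2], counts[i]);
    return 0;
}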
|74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage
>> TBackupTests::BackupUuidColumn[Raw]
>> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTablesPermissions [GOOD]
|74.7%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage
|74.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging
>> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd]
>> ydb-tests-functional-tenants::import_test [GOOD]
|74.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging
>> TBackupTests::BackupUuidColumn[Zstd] [GOOD]
>> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD]
|75.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common
|75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest
|75.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common
|75.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/contrib/python/moto/bin/moto_server
|75.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds
|75.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical
|75.2%| [LD] {RESULT} $(B)/contrib/python/moto/bin/moto_server
|75.3%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds
|75.3%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical
>> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd]
>> TFlatTest::PathSorting
|75.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/import_test >> ydb-tests-functional-tenants::import_test [GOOD]
>> TBackupTests::BackupUuidColumn[Raw] [GOOD]
>> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD]
|76.0%| [TS] {RESULT} ydb/tests/functional/tenants/import_test
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTablesPermissions [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141]
2025-05-05T03:04:20.498753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-05-05T03:04:20.498779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-05T03:04:20.498784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-05-05T03:04:20.498789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-05-05T03:04:20.498795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-05-05T03:04:20.498799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-05T03:04:20.498808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-05-05T03:04:20.498822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-05-05T03:04:20.498907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources#
2025-05-05T03:04:20.498982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-05-05T03:04:20.513787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" }
2025-05-05T03:04:20.513808Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-05-05T03:04:20.513917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon
2025-05-05T03:04:20.517206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-05-05T03:04:20.517277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-05-05T03:04:20.517308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-05-05T03:04:20.518948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-05-05T03:04:20.519001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-05-05T03:04:20.519101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-05-05T03:04:20.519161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-05-05T03:04:20.519739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-05-05T03:04:20.519984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-05-05T03:04:20.519994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-05-05T03:04:20.520026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-05-05T03:04:20.520033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-05-05T03:04:20.520040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-05-05T03:04:20.520059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-05-05T03:04:20.521254Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID
72057594046678944 is [1:126:2152] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.542732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.542789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.542828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.542885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.542893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.543403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.543428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.543471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.543480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.543485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.543489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.543868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.543877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.543882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.544149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.544157Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.544163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.544169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.544801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.545231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.545263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.545414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.545438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.545445Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.545495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.545501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.545522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.545533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.545929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.545937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.545963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.545968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.546017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.546023Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:04:20.546033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:20.546037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.546042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:20.546045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.546049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:04:20.546054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.546059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:04:20.546063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:04:20.546073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path ... ost:9584 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 14254DF4-D5AF-4AA1-B9E9-D90E46ADF6A2 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /view/permissions.pb / / 43 2025-05-05T03:04:26.975172Z node 16 :EXPORT DEBUG: HandlePermissionsPutResponse, self: [16:580:2541], result: PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000007 2025-05-05T03:04:26.980217Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:26.980228Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-05-05T03:04:26.980293Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:26.980299Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:208:2210], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 6 2025-05-05T03:04:26.980312Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:04:26.980320Z node 16 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:26.980529Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-05T03:04:26.980543Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-05T03:04:26.980547Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-05-05T03:04:26.980552Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-05-05T03:04:26.980557Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-05T03:04:26.980578Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710759 2025-05-05T03:04:26.981193Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 TestWaitNotification wait txId: 1004 2025-05-05T03:04:26.981255Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T03:04:26.981263Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T03:04:26.981325Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 1004, at 
schemeshard: 72057594046678944 2025-05-05T03:04:26.981331Z node 16 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1004, at schemeshard: 72057594046678944 REQUEST: PUT /view/metadata.json HTTP/1.1 HEADERS: Host: localhost:9584 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7CE0A11C-5F2A-45D2-81D8-A99734F0C2E8 amz-sdk-request: attempt=1 content-length: 31 content-md5: NIbLWVScnysfZNPAOZgBoA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /view/metadata.json / / 31 REQUEST: PUT /table/metadata.json HTTP/1.1 HEADERS: Host: localhost:9584 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C7454D9E-D973-445C-9021-00A8A2A6344C amz-sdk-request: attempt=1 content-length: 73 content-md5: oBd372HtOJ3JW3N2b2gUVA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /table/metadata.json / / 73 2025-05-05T03:04:26.983400Z node 16 :EXPORT DEBUG: HandleMetadataPutResponse, self: [16:580:2541], result: PutObjectResult { ETag: 3486cb59549c9f2b1f64d3c0399801a0 } 2025-05-05T03:04:26.983413Z node 16 :EXPORT INFO: Finish, self: [16:580:2541], success: 1, error: 2025-05-05T03:04:26.983445Z node 16 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:04:26.983451Z node 16 :EXPORT DEBUG: TExport::TTxProgress: OnSchemeUploadResult: id# 1004, itemIdx# 0, success# 1, error# 2025-05-05T03:04:26.986242Z node 16 :EXPORT DEBUG: TExport::TTxProgress: DoComplete REQUEST: PUT /table/permissions.pb HTTP/1.1 HEADERS: Host: localhost:9584 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D97A8669-4532-4B93-9AD5-4E82444BD01F amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /table/permissions.pb / / 43 REQUEST: PUT /table/scheme.pb HTTP/1.1 HEADERS: Host: localhost:9584 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E6FDB1AB-E12C-4D56-8A72-ABCBCF1559FE amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /table/scheme.pb / / 355 REQUEST: PUT /table/data_00.csv HTTP/1.1 HEADERS: Host: localhost:9584 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7A216D1D-138D-4BC0-8E4B-27E1E1FDDDD4 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /table/data_00.csv / / 0 2025-05-05T03:04:27.010746Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 487 RawX2: 68719479193 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 
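
(Annotation, not part of the captured log.) The PUT requests above carry a content-md5 header and the mock answers with a PutObjectResult ETag; under standard S3 semantics the header is the base64-encoded MD5 digest of the request body, while the ETag of a single-part upload is the same digest in hex — e.g. the permissions.pb upload's content-md5 JIqMFsQjXF0c+sG0y+coog== decodes to exactly the ETag 248a8c16c4235c5d1cfac1b4cbe728a2 reported earlier in this trace. A minimal Python sketch of that relationship, using a stand-in payload rather than the actual exported files:

    import base64, hashlib

    body = b"example payload"  # stand-in; the real bodies are the exported .pb/.json/.csv objects
    digest = hashlib.md5(body).digest()

    content_md5 = base64.b64encode(digest).decode()  # value sent in the content-md5 header
    etag = digest.hex()                              # value returned as the ETag for single-part PUTs

    print(content_md5, etag)
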
2025-05-05T03:04:27.010776Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-05-05T03:04:27.010807Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 487 RawX2: 68719479193 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T03:04:27.010822Z node 16 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 487 RawX2: 68719479193 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T03:04:27.010837Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.010843Z node 16 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.010848Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-05T03:04:27.010854Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-05-05T03:04:27.010907Z node 16 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.011478Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.011576Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.011586Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-05-05T03:04:27.011600Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T03:04:27.011605Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T03:04:27.011611Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T03:04:27.011614Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T03:04:27.011619Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-05-05T03:04:27.011634Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:125:2151] message: TxId: 281474976710759 2025-05-05T03:04:27.011641Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T03:04:27.011647Z node 16 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-05-05T03:04:27.011651Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-05-05T03:04:27.011685Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-05T03:04:27.012263Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-05-05T03:04:27.012279Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-05-05T03:04:27.012289Z node 16 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:04:27.012295Z node 16 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710759 2025-05-05T03:04:27.012300Z node 16 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710759, id# 1004, itemIdx# 1 2025-05-05T03:04:27.012720Z node 16 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:04:27.012743Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T03:04:27.012750Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [16:624:2582] TestWaitNotification: OK eventTxId 1004 >> TFlatTest::PathSorting [GOOD] >> TFlatTest::PartBloomFilter >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] |76.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:04:27.020631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:27.020660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.020666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:27.020672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:27.020684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:27.020688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:27.020698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.020715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:27.020803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:27.020875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:27.035767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:04:27.035790Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:27.039353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:27.039608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:27.039650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:27.040652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:27.040701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:27.040791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.040981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:27.041610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.041885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.041895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.041914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:27.041920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.041925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:27.041962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.043215Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:04:27.070929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:27.071006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.071088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:27.071155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:27.071167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.072105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 
1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.072136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:27.072204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.072224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:27.072229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:27.072234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:27.072715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.072727Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:27.072732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:27.073075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.073085Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.073092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.073099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.073749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:27.074136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:27.074178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:27.074396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.074419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:27.074426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.074483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:27.074490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
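
(Annotation, not part of the captured log.) Traces like this one are easiest to follow by pulling out the "Change state for txid" entries, which show each operation stepping through its schemeshard states (2 -> 3 -> 128 -> 240 in the transaction above). A small Python sketch for filtering a trace saved to a file; the file name is hypothetical:

    import re, sys

    # Print txid state transitions from a schemeshard trace saved as plain text.
    pattern = re.compile(r"Change state for txid (\S+) (\d+) -> (\d+)")

    path = sys.argv[1] if len(sys.argv) > 1 else "schemeshard_trace.log"
    with open(path) as f:
        for line in f:
            for txid, src, dst in pattern.findall(line):
                print(f"{txid}: {src} -> {dst}")
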
2025-05-05T03:04:27.074522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:27.074534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:27.074923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.074932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.074979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.074985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:27.075058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.075066Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:04:27.075080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.075085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.075091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.075095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.075099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:04:27.075105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.075110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:04:27.075114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:04:27.075126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:04:27.075133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:04:27.075137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:04:27.075443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:04:27.075459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:27.206752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.206796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:27.214624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-05-05T03:04:27.214683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-05-05T03:04:27.214908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.214948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:27.214961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-05-05T03:04:27.214990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-05-05T03:04:27.215039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T03:04:27.217885Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:411:2382], attempt# 0 2025-05-05T03:04:27.221546Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:411:2382], sender# [1:410:2381] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-05-05T03:04:27.225556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.225590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-05T03:04:27.225690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.225697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-05T03:04:27.225837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.225847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-05-05T03:04:27.226115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T03:04:27.226127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T03:04:27.226133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-05T03:04:27.226138Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T03:04:27.226146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-05T03:04:27.226162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:1128 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 588C857B-FC06-42D0-A208-87AEBB564B3C amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-05-05T03:04:27.226756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-05T03:04:27.226831Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:1128 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8BBB2E4B-4511-43E3-99BC-45BC815CCCE0 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-05-05T03:04:27.229338Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-05-05T03:04:27.229371Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:410:2381] 2025-05-05T03:04:27.229445Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:411:2382], sender# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:1128 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9D91A5A6-3760-4841-A369-3C86222F8EBA amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2025-05-05T03:04:27.230852Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData 
TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2025-05-05T03:04:27.230871Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:411:2382], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T03:04:27.230914Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T03:04:27.234705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-05-05T03:04:27.234728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T03:04:27.234756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-05-05T03:04:27.234771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-05-05T03:04:27.234784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.234789Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.234794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T03:04:27.234801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T03:04:27.234847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.235369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.235433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.235442Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T03:04:27.235458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T03:04:27.235463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.235469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
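
(Annotation, not part of the captured log.) Once the export scan finishes, the PUT lines above show the bucket holding metadata.json, scheme.pb and data_00.csv.zst (plus permissions.pb in the view/table export earlier). A hedged sketch of checking that layout against an S3-compatible endpoint with boto3; the endpoint URL, bucket name and credentials here are assumptions, not values confirmed by this run, and the local mock may not require real authentication:

    import boto3

    # Assumed local S3-compatible endpoint and bucket; adjust to the mock actually in use.
    s3 = boto3.client(
        "s3",
        endpoint_url="http://localhost:1128",
        aws_access_key_id="test",
        aws_secret_access_key="test",
    )

    expected = {"metadata.json", "scheme.pb"}  # data_00.csv or data_00.csv.zst depending on codec
    keys = {obj["Key"] for obj in s3.list_objects_v2(Bucket="test-bucket").get("Contents", [])}

    missing = expected - keys
    print("missing objects:", missing if missing else "none")
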
2025-05-05T03:04:27.235472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.235477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T03:04:27.235492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-05-05T03:04:27.235498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.235504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T03:04:27.235508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T03:04:27.235538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T03:04:27.236005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:04:27.236019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2369] TestWaitNotification: OK eventTxId 102 |76.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydbd/ydbd |76.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |76.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:04:27.158124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:27.158147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.158152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:27.158157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:27.158167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:27.158172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:27.158181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.158216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:27.158297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:27.158365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:27.199370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:04:27.199387Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:27.207418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:27.207848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:27.207892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:27.209325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:27.209376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:27.209479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.209848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:27.210708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.210976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.210988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.211005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:27.211012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.211017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:27.211048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.212230Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:04:27.232777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:27.232842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.232907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:27.232968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:27.232979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.233892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.233919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:27.233980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.234000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:27.234004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:27.234009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:27.234601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.234614Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:27.234619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:27.236170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.236186Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.236191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.236199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.236838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:27.237260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:27.237301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:27.237483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.237509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:27.237516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.237590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:27.237598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.237628Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:27.237640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:27.238063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.238071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.238109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.238115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:27.238215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.238223Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:04:27.238236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.238241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.238246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.238249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.238253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:04:27.238259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.238263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:04:27.238268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:04:27.238279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:04:27.238285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:04:27.238289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:04:27.238608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:04:27.238626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
EMESHARD INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:27.347266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.347312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:27.347821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-05-05T03:04:27.347851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-05-05T03:04:27.347985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.348005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:27.348012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-05-05T03:04:27.348034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-05-05T03:04:27.348058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T03:04:27.352993Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:411:2382], attempt# 0 2025-05-05T03:04:27.356764Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:411:2382], sender# [1:410:2381] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-05-05T03:04:27.358641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.358658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-05T03:04:27.358730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.358736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-05T03:04:27.358961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.358976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-05-05T03:04:27.359205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T03:04:27.359218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T03:04:27.359224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-05T03:04:27.359230Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T03:04:27.359235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-05T03:04:27.359250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:6811 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D6264AB2-9914-4CBB-868F-BE8FE75AB44C amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-05-05T03:04:27.360477Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:6811 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 32D36173-878C-4D53-994E-8B4057572E3B amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-05-05T03:04:27.371691Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-05-05T03:04:27.371841Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:410:2381] 2025-05-05T03:04:27.371866Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:411:2382], sender# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-05-05T03:04:27.372051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:6811 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 54DF0BA6-07E9-443B-82D3-939BD29D9E89 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-05-05T03:04:27.377459Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData 
TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-05-05T03:04:27.377483Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:411:2382], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T03:04:27.377527Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T03:04:27.379632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:04:27.379652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T03:04:27.379677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:04:27.379691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:04:27.379720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.379725Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.379730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T03:04:27.379736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T03:04:27.379770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.380215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.380283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.380295Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T03:04:27.380306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T03:04:27.380310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.380316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2025-05-05T03:04:27.380319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.380324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T03:04:27.380338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-05-05T03:04:27.380344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.380350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T03:04:27.380354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T03:04:27.380376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T03:04:27.380766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:04:27.380779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2369] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:04:27.362043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:27.362061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.362064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:27.362068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:27.362074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:27.362077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:27.362083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.362093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:27.362150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:27.362244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:27.374239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:04:27.374258Z node 1 
:IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:27.377628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:27.377909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:27.377947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:27.378996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:27.379041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:27.379144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.379329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:27.380029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.380257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.380268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.380285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:27.380290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.380295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:27.380324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.381420Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:04:27.401492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:27.401556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.401625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:27.401684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:27.401692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.404133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.404170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:27.404241Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.404264Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:27.404269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:27.404274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:27.405048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.405064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:27.405069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:27.405763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.405780Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.405788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.405794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.406982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:27.407519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:27.407559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:27.407758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.407786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:27.407794Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.407852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:27.407860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.407890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:27.407902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:27.408473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.408482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.408534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.408539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:27.408600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.408607Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:04:27.408620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.408625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.408629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.408633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.408638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:04:27.408643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.408647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:04:27.408651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:04:27.408662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:04:27.408667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:04:27.408671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:04:27.409009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:04:27.409030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
FO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:27.527464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.527497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:27.527961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-05-05T03:04:27.527995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-05-05T03:04:27.528139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.528158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:27.528165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-05-05T03:04:27.528196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-05-05T03:04:27.528222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T03:04:27.530736Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:411:2382], attempt# 0 2025-05-05T03:04:27.533313Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:411:2382], sender# [1:410:2381] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-05-05T03:04:27.533924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.533931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-05T03:04:27.533986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.533990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-05T03:04:27.534071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.534077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-05-05T03:04:27.534256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T03:04:27.534267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T03:04:27.534272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-05T03:04:27.534277Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T03:04:27.534283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-05T03:04:27.534296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:65321 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6D030A46-FC63-409B-ADF1-D36E5496F24F amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-05-05T03:04:27.534622Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-05-05T03:04:27.535368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:65321 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F4AF362B-F7DC-4691-B784-B72792D5D8D5 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-05-05T03:04:27.537627Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-05-05T03:04:27.537648Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:410:2381] 2025-05-05T03:04:27.537692Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:411:2382], sender# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:65321 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 41DCE35F-7B75-4153-BA4B-4666E2871659 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-05-05T03:04:27.538258Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] 
HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-05-05T03:04:27.538271Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:411:2382], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T03:04:27.538298Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T03:04:27.550552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:04:27.550574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T03:04:27.550595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:04:27.550606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:04:27.550616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.550619Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.550622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T03:04:27.550628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T03:04:27.550666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.553020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.553063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.553072Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T03:04:27.553085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T03:04:27.553088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.553092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 
progress is 1/1 2025-05-05T03:04:27.553094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.553097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T03:04:27.553111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-05-05T03:04:27.553115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.553119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T03:04:27.553121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T03:04:27.553146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T03:04:27.553917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:04:27.553935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2369] TestWaitNotification: OK eventTxId 102 |77.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:04:27.392642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:27.392668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.392673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:27.392678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:27.392688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:27.392692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:27.392701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.392717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:27.392802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:27.392874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:27.405355Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:04:27.405377Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:27.409407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:27.409793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:27.409837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:27.410935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:27.410990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:27.411085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.411270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:27.411930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.412210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.412223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.412245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:27.412253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.412259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:27.412295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.413643Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:04:27.432489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:27.432564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.432640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:27.432704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:27.432715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.433481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.433508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: 
StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:27.433594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.433614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:27.433619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:27.433624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:27.433990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.434000Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:27.434004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:27.434314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.434323Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.434329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.434335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.434874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:27.435187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:27.435226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:27.435399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.435423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:27.435430Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.435485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:27.435491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.435520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:27.435531Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:27.435874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.435880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.435926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.435931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:27.436000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.436007Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:04:27.436018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.436022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.436028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.436031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.436035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:04:27.436039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.436044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:04:27.436047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:04:27.436057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:04:27.436063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:04:27.436068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:04:27.436336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:04:27.436349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
EMESHARD INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:27.562862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.562890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:27.563123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-05-05T03:04:27.563149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-05-05T03:04:27.563312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.563327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:27.563332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-05-05T03:04:27.563347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-05-05T03:04:27.563376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T03:04:27.565207Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:411:2382], attempt# 0 2025-05-05T03:04:27.568560Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:411:2382], sender# [1:410:2381] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:6368 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1805F02F-700D-4CF0-B0C5-D7A4FA83F73E amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-05-05T03:04:27.570344Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-05-05T03:04:27.571385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.571397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, 
LocalPathId: 2] REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:6368 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0383C75A-C01B-49BC-9FB9-DB81CA87391D amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD 2025-05-05T03:04:27.571490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-05-05T03:04:27.571507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-05T03:04:27.571693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.571704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:27.571805Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-05-05T03:04:27.571845Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:410:2381] 2025-05-05T03:04:27.571904Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:411:2382], sender# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-05-05T03:04:27.572019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T03:04:27.572032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T03:04:27.572037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-05T03:04:27.572043Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T03:04:27.572049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-05T03:04:27.572067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:6368 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9B5EEFAF-EF86-4F7B-B161-3E53583891A8 amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2025-05-05T03:04:27.573253Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 
2025-05-05T03:04:27.573265Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:411:2382], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T03:04:27.573302Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T03:04:27.574460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-05T03:04:27.586736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-05-05T03:04:27.586764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T03:04:27.586793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-05-05T03:04:27.586808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-05-05T03:04:27.586824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.586829Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.586834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T03:04:27.586842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T03:04:27.586892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.591074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.591156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.591169Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T03:04:27.591195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T03:04:27.591200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.591207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 
progress is 1/1 2025-05-05T03:04:27.591210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.591215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T03:04:27.591244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-05-05T03:04:27.591253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:27.591259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T03:04:27.591263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T03:04:27.591311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T03:04:27.592035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:04:27.592050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2369] TestWaitNotification: OK eventTxId 102 |77.1%| [AR] {default-linux-x86_64, relwithdebinfo, pic} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |77.1%| [LD] {RESULT} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication >> TFlatTest::PartBloomFilter [GOOD] |77.1%| [AR] {RESULT} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |77.4%| [AR] {BAZEL_UPLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:04:27.739911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:27.739937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.739942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:27.739948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:27.739959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:27.739963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:27.739973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.739987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-05-05T03:04:27.740062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:27.740123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:27.753690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:04:27.753716Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:27.757589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:27.758061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:27.758108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:27.759368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:27.759419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:27.759513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.759725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:27.760486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.760744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.760758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.760778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:27.760784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.760790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:27.760823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.762040Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:04:27.784130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:27.784211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.784286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:27.784343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:27.784352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.786804Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.786846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:27.786917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.786939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:27.786944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:27.786951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:27.790957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.790990Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:27.790999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:27.791631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.791644Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.791651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.791661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.792690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:27.793238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:27.793285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:27.793491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.793517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:27.793528Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.793613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:27.793621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.793672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:27.793686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:27.794120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.794129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.794191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.794211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:27.794294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.794302Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:04:27.794314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.794319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.794325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.794328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.794333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:04:27.794338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.794343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:04:27.794348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:04:27.794361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:04:27.794368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:04:27.794372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:04:27.794728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:04:27.794743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
4 2025-05-05T03:04:27.977674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T03:04:27.977687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T03:04:27.977693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-05T03:04:27.977699Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T03:04:27.977706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-05T03:04:27.977726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:17009 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0B9F73F2-19F0-45F5-A19A-C5C0D52C30CD amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_01.csv.zst HTTP/1.1 HEADERS: Host: localhost:17009 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0ACABC89-B402-492B-9BC1-712DC1019A9B amz-sdk-request: attempt=1 content-length: 20 content-md5: 8NOHH1ycwPXC5K+v+37u8g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv.zst / / 20 2025-05-05T03:04:27.978329Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2431], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-05-05T03:04:27.978910Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:478:2434], result# PutObjectResult { ETag: f0d3871f5c9cc0f5c2e4afaffb7eeef2 } 2025-05-05T03:04:27.978923Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:478:2434], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T03:04:27.978988Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:477:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T03:04:27.980641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:17009 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FC8AA988-3ACC-45BD-8819-205182119B6E amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 
Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-05-05T03:04:27.982629Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2431], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-05-05T03:04:27.982653Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:472:2430] 2025-05-05T03:04:27.982700Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:473:2431], sender# [1:472:2430], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-05-05T03:04:27.983691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host2025-05-05T03:04:27.983706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-05-05T03:04:27.983742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:04:27.983757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:04:27.983771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 : localhost:17009 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D7E8BAB2-6778-4BCC-BC4D-B35C251C7DFA amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD 2025-05-05T03:04:27.983813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-05-05T03:04:27.984253Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2431], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-05-05T03:04:27.984265Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:473:2431], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T03:04:27.984331Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle 
TEvExportScan::TEvFinish: self# [1:472:2430], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T03:04:27.986151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.999578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:04:27.999612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T03:04:27.999643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:04:27.999659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:04:27.999676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.999682Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.999688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T03:04:27.999694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-05T03:04:27.999701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T03:04:27.999755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:28.000283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:28.000395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:28.000405Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T03:04:28.000418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T03:04:28.000423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:28.000429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T03:04:28.000432Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:28.000437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T03:04:28.000453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:370:2336] message: TxId: 102 2025-05-05T03:04:28.000460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:28.000466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T03:04:28.000471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T03:04:28.000498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-05T03:04:28.001004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:04:28.001015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:452:2411] TestWaitNotification: OK eventTxId 102 >> ydb-tests-functional-hive::import_test [GOOD] >> TLocksFatTest::PointSetBreak >> TExportToS3WithRebootsTests::ShouldDisableAutoDropping [GOOD] |77.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |77.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/import_test >> ydb-tests-functional-hive::import_test [GOOD] >> TFlatTest::ReadOnlyMode |77.7%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::PartBloomFilter [GOOD] Test command err: 2025-05-05T03:04:27.736057Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500792166254955712:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:27.736123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000285/r3tmp/tmpTF0Cwy/pdisk_1.dat 2025-05-05T03:04:27.801070Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16239 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:04:27.841770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:27.848592Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-05T03:04:27.871437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:27.871462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:27.872286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1746414267892 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "A" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710663 CreateStep: 1746414267941 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "B" PathId: 4 Sche... (TRUNCATED) 2025-05-05T03:04:28.156957Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500792167400436022:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:28.156971Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000285/r3tmp/tmpKmesUn/pdisk_1.dat 2025-05-05T03:04:28.176492Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9573 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:04:28.261396Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:28.261430Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:28.261871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:04:28.262475Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:04:28.266660Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:04:28.268403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:28.414577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 waiting... >> TLocksTest::GoodLock |77.8%| [TS] {RESULT} ydb/tests/functional/hive/import_test |77.9%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |77.9%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |78.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |78.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud >> TFlatTest::ReadOnlyMode [GOOD] >> TFlatTest::RejectByIncomingReadSetSize >> TLocksFatTest::RangeSetRemove ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldDisableAutoDropping [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.617763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.617789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.617794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 
2025-05-05T03:04:20.617799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.617805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.617809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.617820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.617836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.617942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.618022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.631060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.631082Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.631168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.634454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.634488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.634509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.636910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.636947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.637031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.637065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.637411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.637612Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.637622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.637665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.637674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.637680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.637701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.638854Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.656685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.656740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.656778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.656823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.656830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.657432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.657458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.657504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.657513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.657518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.657523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.658970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.658986Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.658991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.659922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.659934Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.659939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.659945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.660575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.661042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.661081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.661266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.661290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.661297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.661355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.661362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.661387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.661398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.663078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.663088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.663126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.663131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.663140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.663147Z node 1 :FLAT_TX_SCHEMESHARD I ... 
8944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T03:04:28.951730Z node 24 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T03:04:28.951735Z node 24 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-05T03:04:28.951739Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:04:28.952064Z node 24 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T03:04:28.952078Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T03:04:28.952082Z node 24 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T03:04:28.952086Z node 24 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T03:04:28.952093Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-05T03:04:28.952105Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2025-05-05T03:04:28.952330Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2025-05-05T03:04:28.952350Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000007 2025-05-05T03:04:28.952612Z node 24 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:28.952633Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 103079217261 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:28.952640Z node 24 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 1004:0, step: 5000007, at schemeshard: 72057594046678944 2025-05-05T03:04:28.952661Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 1004:0, at schemeshard: 72057594046678944 2025-05-05T03:04:28.952668Z node 24 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-05-05T03:04:28.952672Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-05T03:04:28.952676Z node 24 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-05-05T03:04:28.952680Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 
2025-05-05T03:04:28.952687Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:04:28.952695Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-05T03:04:28.952701Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-05-05T03:04:28.952706Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-05T03:04:28.952710Z node 24 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2025-05-05T03:04:28.952713Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2025-05-05T03:04:28.952721Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-05T03:04:28.952726Z node 24 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2025-05-05T03:04:28.952730Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2025-05-05T03:04:28.952734Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-05-05T03:04:28.952851Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T03:04:28.953160Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T03:04:28.953199Z node 24 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:28.953206Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:28.953229Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-05T03:04:28.953251Z node 24 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:28.953256Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [24:205:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-05-05T03:04:28.953260Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [24:205:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 2 FAKE_COORDINATOR: Erasing txId 1004 2025-05-05T03:04:28.953382Z node 24 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T03:04:28.953392Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T03:04:28.953397Z node 24 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T03:04:28.953401Z node 24 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 
1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-05-05T03:04:28.953405Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:04:28.953476Z node 24 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T03:04:28.953485Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T03:04:28.953488Z node 24 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T03:04:28.953491Z node 24 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-05T03:04:28.953494Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-05T03:04:28.953503Z node 24 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-05-05T03:04:28.953542Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:04:28.953547Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-05T03:04:28.953555Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:04:28.954088Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T03:04:28.954177Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T03:04:28.954190Z node 24 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 2025-05-05T03:04:28.954296Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-05T03:04:28.954320Z node 24 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 34us result status StatusSuccess 2025-05-05T03:04:28.954417Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 
SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "export-1003" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710757 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TFlatTest::ShardFreezeRejectBadProtobuf >> TFlatTest::Ls >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings ------- [LD] {default-linux-x86_64, relwithdebinfo, pic} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so ld.lld: warning: version script assignment of 'global' to symbol 'BindSymbols' failed: symbol not defined |78.1%| [LD] {RESULT} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so >> ydb-tests-functional-config::import_test [GOOD] >> TFlatTest::ShardFreezeRejectBadProtobuf [GOOD] >> TFlatTest::SelectRangeSkipNullKeys >> TFlatTest::AutoSplitBySize >> ydb-tests-olap-data_quotas::import_test [GOOD] >> TFlatTest::Ls [GOOD] >> TFlatTest::LsPathId >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] |78.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/import_test >> ydb-tests-functional-config::import_test [GOOD] |78.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/import_test >> ydb-tests-olap-data_quotas::import_test [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats >> TFlatTest::SelectRangeSkipNullKeys [GOOD] >> TFlatTest::LsPathId [GOOD] >> TFlatTest::RejectByIncomingReadSetSize [GOOD] >> TLocksFatTest::PointSetBreak [GOOD] >> TLocksFatTest::LocksLimit >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed >> ydb-tests-functional-cms::import_test [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleView [GOOD] |78.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |78.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part17/ydb-tests-fq-yt-kqp_yt_file-part17 |78.2%| [TS] {RESULT} ydb/tests/functional/config/import_test |78.2%| [LD] {RESULT} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon >> TLocksFatTest::RangeSetRemove [GOOD] |78.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/import_test >> ydb-tests-functional-cms::import_test [GOOD] |78.2%| [LD] {RESULT} 
$(B)/ydb/tests/fq/yt/kqp_yt_file/part17/ydb-tests-fq-yt-kqp_yt_file-part17 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::LsPathId [GOOD] Test command err: 2025-05-05T03:04:30.238689Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500792176667452650:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:30.238706Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00041d/r3tmp/tmpK4IyTd/pdisk_1.dat 2025-05-05T03:04:30.303756Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11807 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:30.341379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:30.341402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:30.342439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:04:30.384261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: // TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1746414270433 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePo... (TRUNCATED) TClient::Ls request: /dc-11 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" TClient::Ls request: /dc-2 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" waiting... TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1746414270433 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1746414270447 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... 
(TRUNCATED) TClient::Ls request: /dc-1/Berkanavt TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1746414270447 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 Shard... (TRUNCATED) 2025-05-05T03:04:30.400978Z node 1 :TX_PROXY ERROR: Actor# [1:7500792176667453265:2322] txid# 281474976715659, issues: { message: "Check failed: path: \'/dc-1/Berkanavt\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155" severity: 1 } Error 1: Check failed: path: '/dc-1/Berkanavt', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155 TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1746414270433 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1746414270447 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... (TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" waiting... 
TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1746414270433 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1746414270447 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "arcadia" Path... (TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "arcadia" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1746414270454 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsI... (TRUNCATED) 2025-05-05T03:04:30.687346Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500792176699554462:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:30.687390Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00041d/r3tmp/tmpmnBCcH/pdisk_1.dat 2025-05-05T03:04:30.706976Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:30550 TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:04:30.794528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:30.794562Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:30.795102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:04:30.796608Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:04:30.796887Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... |78.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part10/ydb-tests-fq-yt-kqp_yt_file-part10 |78.2%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part10/ydb-tests-fq-yt-kqp_yt_file-part10 >> TLocksFatTest::ShardLocks >> TLocksTest::GoodLock [GOOD] >> TLocksTest::GoodNullLock |78.2%| [TS] {RESULT} ydb/tests/olap/data_quotas/import_test |78.2%| [TS] {RESULT} ydb/tests/functional/cms/import_test >> ydb-tests-functional-blobstorage::import_test [GOOD] >> TLocksFatTest::LocksLimit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeSkipNullKeys [GOOD] Test command err: 2025-05-05T03:04:30.019000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500792176481293622:2138];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:30.019026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d2c/r3tmp/tmpSLMJVL/pdisk_1.dat 2025-05-05T03:04:30.113844Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5563 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:30.150887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:30.169852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:30.184649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:30.184674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:30.186383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:04:30.232674Z node 1 :TX_PROXY ERROR: Actor# [1:7500792176481294225:2357] txid# 281474976710659, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-05-05T03:04:30.233321Z node 1 :TX_PROXY ERROR: Actor# [1:7500792176481294240:2365] txid# 281474976710660, issues: { message: "Unexpected freeze state" severity: 1 } Error 128: Unexpected freeze state 2025-05-05T03:04:30.233764Z node 1 :TX_PROXY ERROR: Actor# [1:7500792176481294246:2370] txid# 281474976710661, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-05-05T03:04:30.234126Z node 1 :TX_PROXY ERROR: Actor# [1:7500792176481294252:2375] txid# 281474976710662, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d2c/r3tmp/tmpmiMsFX/pdisk_1.dat 2025-05-05T03:04:30.495682Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:30.496309Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:10827 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:30.586623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:04:30.586728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:30.586760Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:30.587927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:04:30.588360Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:30.595870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
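The stream above interleaves three kinds of records: build-node progress lines (|NN.N%| [LD]/[AR]/[TS]/[TM] ...), test verdict lines (>> Suite::Test [GOOD]), and per-test "Test command err:" dumps. Below is a minimal sketch of one way such a stream could be summarized offline; it assumes only the line shapes visible in this log, and the file name, regular expressions, and function names are illustrative assumptions, not part of ya make or YDB tooling.

#!/usr/bin/env python3
# Sketch: summarize a captured `ya make` console stream (assumed saved to ya_stdout.log).
# Assumption: verdict lines look like ">> Suite::Test [GOOD]" and failed build nodes look
# like "|12.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/path/to/target", as seen in this log.
import re
import sys
from collections import Counter

VERDICT_RE = re.compile(r">> (\S+) \[(\w+)\]")  # e.g. ">> TFlatTest::Ls [GOOD]"
FAILED_NODE_RE = re.compile(r"\[(?:LD|AR|TM|TS|PY|PK)\] \{[^}]*FAILED[^}]*\} (\S+)")

def summarize(lines):
    """Count test verdicts and collect build nodes marked FAILED."""
    verdicts = Counter()
    not_good = []       # (test name, verdict) for anything other than GOOD
    failed_nodes = []   # build targets whose node status contained FAILED
    for line in lines:
        for name, verdict in VERDICT_RE.findall(line):
            verdicts[verdict] += 1
            if verdict != "GOOD":
                not_good.append((name, verdict))
        failed_nodes.extend(FAILED_NODE_RE.findall(line))
    return verdicts, not_good, failed_nodes

if __name__ == "__main__":
    path = sys.argv[1] if len(sys.argv) > 1 else "ya_stdout.log"
    with open(path, encoding="utf-8", errors="replace") as f:
        verdicts, not_good, failed_nodes = summarize(f)
    print("verdict counts:", dict(verdicts))
    for name, verdict in not_good:
        print("non-GOOD test:", name, verdict)
    for node in failed_nodes:
        print("FAILED build node:", node)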
|78.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/docs/generator/generator |78.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/import_test >> ydb-tests-functional-blobstorage::import_test [GOOD] |78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |78.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleView [GOOD] |78.3%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator |78.3%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |78.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |78.3%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |78.3%| [TS] {RESULT} ydb/tests/functional/blobstorage/import_test >> TLocksFatTest::ShardLocks [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleView [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.539015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.539038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.539044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:20.539049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.539055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.539059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.539068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.539083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.539182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.539252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.555279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.555297Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.555370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.562515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.562576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.562623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.578648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.578707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.578812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.578855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.583200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.583470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.583481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.583527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.583534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.583541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.583569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.590898Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.610997Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.611059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.611114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.611167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.611178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.611693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.611721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.611762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.611770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.611775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.611779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.612104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.612114Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.612119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.612386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.612394Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.612398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.612404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.612941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.613274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.613308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.613465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.613487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.613495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.613547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.613566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.613591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.613602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.613928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.613933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.613960Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.613964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.613970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.613974Z node 1 :FLAT_TX_SCHEMESHARD I ... 
94046678944, txId: 281474976710758 2025-05-05T03:04:31.586009Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-05T03:04:31.586015Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:04:31.587980Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:31.588004Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:31.588011Z node 37 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T03:04:31.588015Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-05-05T03:04:31.588019Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:04:31.588034Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T03:04:31.588221Z node 37 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:04:31.588413Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-05T03:04:31.588427Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T03:04:31.588433Z node 37 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-05T03:04:31.588574Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-05-05T03:04:31.588601Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2025-05-05T03:04:31.588693Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-05-05T03:04:31.588897Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:31.588920Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 158913792107 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:31.588928Z node 37 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, 
step: 5000005, at schemeshard: 72057594046678944 2025-05-05T03:04:31.588953Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-05-05T03:04:31.588963Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T03:04:31.588967Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T03:04:31.588973Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T03:04:31.588976Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T03:04:31.588986Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:04:31.588995Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:04:31.589001Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-05-05T03:04:31.589007Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T03:04:31.589014Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-05-05T03:04:31.589019Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-05-05T03:04:31.589029Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:04:31.589034Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-05-05T03:04:31.589038Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-05-05T03:04:31.589042Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T03:04:31.589163Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-05-05T03:04:31.589456Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:31.589463Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:31.589502Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T03:04:31.589525Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:31.589531Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [37:204:2206], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-05-05T03:04:31.589536Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [37:204:2206], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 2025-05-05T03:04:31.589668Z node 37 :FLAT_TX_SCHEMESHARD INFO: 
Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:31.589679Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:31.589684Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T03:04:31.589688Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-05-05T03:04:31.589693Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:04:31.589738Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:31.589747Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:31.589750Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T03:04:31.589754Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T03:04:31.589761Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:04:31.589771Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-05-05T03:04:31.589776Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [37:125:2151] 2025-05-05T03:04:31.589807Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:04:31.589812Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T03:04:31.589821Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:04:31.590137Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:31.590365Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:31.590383Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-05-05T03:04:31.590393Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-05-05T03:04:31.590401Z node 37 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T03:04:31.590405Z node 37 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-05-05T03:04:31.590410Z node 37 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 1003, itemIdx# 4294967295 2025-05-05T03:04:31.590459Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:04:31.590729Z node 37 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-05T03:04:31.590775Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-05T03:04:31.590782Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-05T03:04:31.590837Z node 37 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-05T03:04:31.590850Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-05T03:04:31.590871Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [37:391:2380] TestWaitNotification: OK eventTxId 1003 |78.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |78.4%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-result_sets] |78.4%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::RejectByIncomingReadSetSize [GOOD] Test command err: 2025-05-05T03:04:29.392262Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500792173746961353:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:29.392337Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000428/r3tmp/tmptkKVUN/pdisk_1.dat 2025-05-05T03:04:29.460103Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16669 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:29.523905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:29.523932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:29.527049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:04:29.527483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:29.538896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpMkDir MkDir { Name: "Dir1" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-05T03:04:29.538953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /dc-1/Dir1, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-05T03:04:29.538981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: dc-1, child name: Dir1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-05T03:04:29.538994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-05-05T03:04:29.539009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-05T03:04:29.539070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-05T03:04:29.539080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 waiting... 
2025-05-05T03:04:29.539684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-05-05T03:04:29.539706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /dc-1/Dir1 2025-05-05T03:04:29.539754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-05T03:04:29.539756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-05T03:04:29.539785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-05T03:04:29.539794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-05T03:04:29.539796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500792173746961864:2370], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2025-05-05T03:04:29.539802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500792173746961864:2370], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2025-05-05T03:04:29.539807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-05T03:04:29.539811Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-05-05T03:04:29.539817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715658 ready parts: 1/1 2025-05-05T03:04:29.540224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:29.540399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-05-05T03:04:29.540405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-05-05T03:04:29.540406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-05-05T03:04:29.540409Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-05-05T03:04:29.540411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-05-05T03:04:29.540458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 
72057594046644480, cookie: 281474976715658 2025-05-05T03:04:29.540471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-05-05T03:04:29.540471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-05-05T03:04:29.540473Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-05-05T03:04:29.540492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-05T03:04:29.540497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-05-05T03:04:29.540508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:04:29.540509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-05-05T03:04:29.540511Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:04:29.540615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715658 msg type: 269090816 2025-05-05T03:04:29.540638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715658, partId: 4294967295, tablet: 72057594046316545 2025-05-05T03:04:29.540847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2025-05-05T03:04:29.540858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2025-05-05T03:04:29.543688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746414269593, transactions count in step: 1, at schemeshard: 72057594046644480 2025-05-05T03:04:29.543727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1746414269593 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-05-05T03:04:29.543739Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1746414269593, at schemeshard: 72057594046644480 2025-05-05T03:04:29.543765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2025-05-05T03:04:29.543802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-05T03:04:29.543811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T03:04:29.544132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-05T03:04:29.544141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, 
txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-05T03:04:29.544174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-05T03:04:29.544186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-05T03:04:29.544201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500792173746961864:2370], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2025-05-05T03:04:29.544208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500792173746961864:2370], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2025-05-05T03:04:29.544212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-05T03:04:29.544217Z node 1 : ... 05-05T03:04:29.579818Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:16} Tx{27, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{36, redo 162b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-05-05T03:04:29.579820Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:16} Tx{27, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-05-05T03:04:29.579906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-05-05T03:04:29.579913Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-05-05T03:04:29.579915Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-05T03:04:29.579922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-05-05T03:04:29.579924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715661 2025-05-05T03:04:29.579928Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2025-05-05T03:04:29.579931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 4 2025-05-05T03:04:29.579949Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{37, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-05-05T03:04:29.579952Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-05-05T03:04:29.579968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 
LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-05-05T03:04:29.579971Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-05-05T03:04:29.579973Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-05T03:04:29.579978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-05-05T03:04:29.579979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715661 2025-05-05T03:04:29.579981Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-05-05T03:04:29.579982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-05T03:04:29.579988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715661, subscribers: 1 2025-05-05T03:04:29.579990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7500792173746962216:2302] 2025-05-05T03:04:29.579997Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{38, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-05-05T03:04:29.580000Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-05-05T03:04:29.580432Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T03:04:29.580438Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:13:1:24576:107:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T03:04:29.580441Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:16:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T03:04:29.580443Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:16:1:24576:122:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T03:04:29.580446Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:17:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T03:04:29.580447Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:17:1:24576:119:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T03:04:29.580451Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:18:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T03:04:29.580452Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# 
[72057594046644480:4:18:1:24576:132:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T03:04:29.580466Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:14} commited cookie 1 for step 13 2025-05-05T03:04:29.580476Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 16 2025-05-05T03:04:29.580494Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 17 2025-05-05T03:04:29.580496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2025-05-05T03:04:29.580499Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 18 2025-05-05T03:04:29.580500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2025-05-05T03:04:29.580563Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] received poison pill [1:7500792173746962217:2302] 2025-05-05T03:04:29.580569Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] notify reset [1:7500792173746962217:2302] 2025-05-05T03:04:29.580634Z node 1 :PIPE_SERVER DEBUG: [72057594046644480] Got PeerClosed from# [1:7500792173746962217:2302] 2025-05-05T03:04:29.585208Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:14} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-05-05T03:04:29.585220Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:14} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-05T03:04:29.585240Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:14} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-05-05T03:04:29.585243Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:14} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-05-05T03:04:29.585265Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [1:7500792173746961847:2352] 2025-05-05T03:04:29.585267Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [1:7500792173746961847:2352] 2025-05-05T03:04:29.585271Z node 1 :PIPE_SERVER DEBUG: [72057594046382081] HandleSend Sender# [1:7500792173746961843:2352] EventType# 269156352 2025-05-05T03:04:29.734519Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] send [1:7500792173746961767:2079] 2025-05-05T03:04:29.734538Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] push event to server [1:7500792173746961767:2079] 2025-05-05T03:04:29.734575Z node 1 :PIPE_SERVER DEBUG: [72057594037936129] HandleSend Sender# [1:7500792173746961186:2079] EventType# 272039936 2025-05-05T03:04:29.852454Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500792172097623172:2210];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:29.853779Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000428/r3tmp/tmpKRvoYQ/pdisk_1.dat 2025-05-05T03:04:29.863528Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to 
server localhost:19919 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:29.955010Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:29.955039Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:29.955308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:29.956104Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:04:29.965159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:31.047739Z node 2 :TX_PROXY ERROR: Actor# [2:7500792180687558706:2595] txid# 281474976715700 FailProposedRequest: Transaction incoming read set size 1000077 for tablet 72075186224037889 exceeded limit 1000 Status# ExecError 2025-05-05T03:04:31.047768Z node 2 :TX_PROXY ERROR: Actor# [2:7500792180687558706:2595] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksFatTest::LocksLimit [GOOD] Test command err: 2025-05-05T03:04:29.117591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500792172782553379:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:29.117620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000322/r3tmp/tmp2GKL7s/pdisk_1.dat 2025-05-05T03:04:29.164485Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:64343 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:29.218921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:29.218965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:29.219973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:04:29.245884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:29.254641Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:29.257744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:29.321708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:29.332942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000322/r3tmp/tmpeH8sT9/pdisk_1.dat 2025-05-05T03:04:31.234361Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:04:31.236768Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:64433 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:31.325165Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:31.325198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:31.325572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:04:31.326597Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:04:31.327119Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:04:31.330407Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:04:31.331508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:31.351203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:31.410859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:31.739585Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500792181840829182:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:31.739693Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000322/r3tmp/tmprPXJ5M/pdisk_1.dat 2025-05-05T03:04:31.757282Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:64844 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:31.843547Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:31.843580Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:31.843937Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:04:31.844401Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:04:31.845876Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:31.858711Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:04:31.871241Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T03:04:31.891291Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
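[editor's note] The TX_PROXY rejection earlier in this run ("Transaction incoming read set size 1000077 for tablet 72075186224037889 exceeded limit 1000 Status# ExecError") is a plain size guard on the proposed transaction. The following is a minimal illustrative sketch of such a guard only; it is not the YDB implementation, and the function, type names, and the 1000-byte limit are assumptions taken from the log line, not from the source tree.

// Illustrative sketch only: a standalone size check in the spirit of the
// "incoming read set size ... exceeded limit" rejection logged above.
// All names and the limit value are assumptions, not YDB code.
#include <cstdint>
#include <iostream>
#include <string>

struct TProposalCheck {
    bool Accepted;
    std::string Error;
};

TProposalCheck CheckIncomingReadSetSize(uint64_t readSetBytes, uint64_t limitBytes) {
    if (readSetBytes > limitBytes) {
        return {false, "incoming read set size " + std::to_string(readSetBytes) +
                       " exceeded limit " + std::to_string(limitBytes)};
    }
    return {true, ""};
}

int main() {
    // Values taken from the log line above: 1000077 bytes against a 1000-byte limit.
    const auto res = CheckIncomingReadSetSize(1000077, 1000);
    std::cout << (res.Accepted ? "accepted" : "rejected: " + res.Error) << "\n";
    return 0;
}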
>> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleView [GOOD] |78.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/ydb-tests-olap |78.5%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap >> ydb-tests-functional-ydb_cli::import_test [GOOD] |78.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleView [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.356643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.356667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.356672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:20.356677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.356683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.356686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.356695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.356708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.356795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.356864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.381709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" 
AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.381730Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.381813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.383156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.383187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.383211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.384054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.384087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.384181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.384216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.388167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.388388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.388397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.388443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.388453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.388459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.388481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.389818Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.444705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.444778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.444830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.444887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.444898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.450738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.450771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.450832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.450844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.450850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.450855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.451312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.451324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.451329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.451627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.451637Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.451642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.451648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.452286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.452818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.452858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.453041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.453067Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.453075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.453134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.453140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.453170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.453181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.453537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.453545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.453598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.453603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.453612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.453617Z node 1 :FLAT_TX_SCHEMESHARD I ... 
94046678944, txId: 281474976710758 2025-05-05T03:04:32.506892Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-05T03:04:32.506896Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:04:32.507030Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:32.507046Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:32.507050Z node 40 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T03:04:32.507054Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-05-05T03:04:32.507058Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:04:32.507068Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T03:04:32.507408Z node 40 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:04:32.507551Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-05T03:04:32.507557Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T03:04:32.507563Z node 40 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-05T03:04:32.507669Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-05-05T03:04:32.507691Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-05-05T03:04:32.507784Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:32.507836Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:32.507855Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 122 RawX2: 171798693988 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:32.507862Z node 40 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, 
step: 5000005, at schemeshard: 72057594046678944 2025-05-05T03:04:32.507885Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-05-05T03:04:32.507894Z node 40 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T03:04:32.507897Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T03:04:32.507902Z node 40 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T03:04:32.507905Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T03:04:32.507912Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:04:32.507921Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:04:32.507927Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-05-05T03:04:32.507933Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T03:04:32.507937Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-05-05T03:04:32.507940Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-05-05T03:04:32.507949Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:04:32.507954Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-05-05T03:04:32.507958Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-05-05T03:04:32.507962Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T03:04:32.508190Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-05-05T03:04:32.508466Z node 40 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:32.508472Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:32.508497Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T03:04:32.508519Z node 40 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:32.508523Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [40:205:2207], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-05-05T03:04:32.508531Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [40:205:2207], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 2025-05-05T03:04:32.508615Z node 40 :FLAT_TX_SCHEMESHARD INFO: 
Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:32.508626Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:32.508630Z node 40 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T03:04:32.508634Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-05-05T03:04:32.508639Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:04:32.508696Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:32.508704Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:32.508707Z node 40 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T03:04:32.508711Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T03:04:32.508714Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:04:32.508723Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-05-05T03:04:32.508728Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [40:126:2151] 2025-05-05T03:04:32.508761Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:04:32.508766Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T03:04:32.508774Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:04:32.509092Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:32.509294Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:32.509312Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-05-05T03:04:32.509321Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-05-05T03:04:32.509328Z node 40 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T03:04:32.509333Z node 40 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-05-05T03:04:32.509340Z node 40 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 1003, itemIdx# 4294967295 2025-05-05T03:04:32.509426Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:04:32.509673Z node 40 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-05T03:04:32.509716Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-05T03:04:32.509723Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-05T03:04:32.509779Z node 40 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-05T03:04:32.509793Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-05T03:04:32.509797Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [40:393:2382] TestWaitNotification: OK eventTxId 1003 |78.5%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |78.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part11/ydb-tests-fq-yt-kqp_yt_file-part11 |78.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part2/ydb-tests-fq-yt-kqp_yt_file-part2 |78.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/import_test >> ydb-tests-functional-ydb_cli::import_test [GOOD] |78.5%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part11/ydb-tests-fq-yt-kqp_yt_file-part11 |78.5%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part2/ydb-tests-fq-yt-kqp_yt_file-part2 >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] |78.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part19/ydb-tests-fq-yt-kqp_yt_file-part19 |78.5%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part19/ydb-tests-fq-yt-kqp_yt_file-part19 >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksFatTest::ShardLocks [GOOD] Test command err: 2025-05-05T03:04:29.935953Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500792172803633715:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:29.935973Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d1e/r3tmp/tmp3mK4x2/pdisk_1.dat 2025-05-05T03:04:30.009210Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7848 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: 2025-05-05T03:04:30.037898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:30.037933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:30.039053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:30.067826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:30.073846Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:30.086756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:30.116894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:30.124984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d1e/r3tmp/tmp32AjJl/pdisk_1.dat 2025-05-05T03:04:30.839421Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:04:30.840165Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:31719 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:04:30.934460Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:30.934495Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:30.935049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:04:30.935550Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:04:30.937310Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:30.947420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:04:30.963881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T03:04:30.975107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:31.942575Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500792181077603313:2082];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:31.945155Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d1e/r3tmp/tmpVHPvCj/pdisk_1.dat 2025-05-05T03:04:31.957159Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26576 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:32.046000Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:32.046036Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:32.046654Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:04:32.047554Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:04:32.051970Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:32.067015Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:32.084009Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:32.097963Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:32.457178Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500792185657430873:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:32.457224Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d1e/r3tmp/tmpoez7Z3/pdisk_1.dat 2025-05-05T03:04:32.474513Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:63648 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:32.559672Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:32.559708Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:32.559953Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:04:32.560599Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:04:32.570530Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:32.577036Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:32.602887Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:32.614762Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
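[editor's note] The repeated "WaitRootIsUp 'dc-1'... / WaitRootIsUp 'dc-1' success." lines in these test blocks are a poll-until-ready pattern: the client keeps describing the root path until the scheme responds with SUCCESS. The sketch below is a generic poll-with-timeout helper written for illustration only; it is not the YDB TClient API, and the predicate, timeout, and interval are assumptions.

// Illustrative sketch only: a generic wait/poll helper in the spirit of the
// "WaitRootIsUp" pattern seen in the test output. Not YDB code.
#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

bool WaitUntil(const std::function<bool()>& ready,
               std::chrono::milliseconds timeout,
               std::chrono::milliseconds pollInterval) {
    const auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (ready()) {
            return true;  // e.g. the root path was described with StatusCode: SUCCESS
        }
        std::this_thread::sleep_for(pollInterval);
    }
    return false;  // caller decides whether a timeout fails the test
}

int main() {
    int attempts = 0;
    // Hypothetical readiness predicate: succeeds on the third poll.
    const bool ok = WaitUntil([&] { return ++attempts >= 3; },
                              std::chrono::seconds(5),
                              std::chrono::milliseconds(50));
    std::cout << (ok ? "WaitRootIsUp success" : "WaitRootIsUp timed out") << "\n";
    return 0;
}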
|78.5%| [TS] {RESULT} ydb/tests/functional/ydb_cli/import_test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] >> TLocksTest::GoodNullLock [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleView [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.594142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.594169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.594174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:20.594179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.594185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.594188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.594215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.594229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.594324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.594406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.608007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" 
AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.608027Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.608112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.610054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.610096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.610113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.613332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.613379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.613483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.613537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.613998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.614236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.614248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.614295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.614303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.614311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.614335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.615850Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.632474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.632526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.632562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.632606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.632615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.633078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.633098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.633133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.633140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.633145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.633149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.633446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.633455Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.633459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.633741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.633748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.633751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.633755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.634261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.634535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.634559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.634676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.634693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.634698Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.634737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.634741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.634766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.634773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.635016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.635021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.635050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.635053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.635060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.635064Z node 1 :FLAT_TX_SCHEMESHARD I ... 94046678944, txId: 281474976710758 2025-05-05T03:04:33.436286Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-05T03:04:33.436290Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:04:33.436336Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:33.436344Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:33.436347Z node 41 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T03:04:33.436350Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-05-05T03:04:33.436353Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:04:33.436360Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T03:04:33.436810Z node 41 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:04:33.436969Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 
72057594046678944 2025-05-05T03:04:33.436975Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T03:04:33.436980Z node 41 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-05T03:04:33.436994Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-05-05T03:04:33.437012Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-05-05T03:04:33.437097Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:33.437116Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 176093661292 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:33.437122Z node 41 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944 2025-05-05T03:04:33.437141Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-05-05T03:04:33.437147Z node 41 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T03:04:33.437151Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T03:04:33.437155Z node 41 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T03:04:33.437158Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T03:04:33.437165Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:04:33.437173Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:04:33.437178Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-05-05T03:04:33.437183Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T03:04:33.437187Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-05-05T03:04:33.437191Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-05-05T03:04:33.437198Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:04:33.437203Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-05-05T03:04:33.437207Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 
281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-05-05T03:04:33.437210Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T03:04:33.437299Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:33.437313Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:33.437616Z node 41 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:33.437624Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:33.437649Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T03:04:33.437670Z node 41 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:33.437674Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:207:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-05-05T03:04:33.437678Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:207:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-05-05T03:04:33.437763Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:33.437772Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:33.437776Z node 41 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T03:04:33.437780Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-05-05T03:04:33.437784Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:04:33.437879Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:33.437888Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:33.437891Z node 41 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T03:04:33.437895Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T03:04:33.437898Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:04:33.437908Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-05-05T03:04:33.437912Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [41:125:2151] 2025-05-05T03:04:33.437952Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:04:33.437956Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T03:04:33.438012Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:04:33.438586Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:33.438713Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T03:04:33.438731Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-05-05T03:04:33.438741Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-05-05T03:04:33.438749Z node 41 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:04:33.438754Z node 41 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-05-05T03:04:33.438759Z node 41 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 1003, itemIdx# 4294967295 2025-05-05T03:04:33.438808Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:04:33.439124Z node 41 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-05T03:04:33.439167Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-05T03:04:33.439174Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-05T03:04:33.439228Z node 41 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-05T03:04:33.439243Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-05T03:04:33.439248Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [41:400:2389] TestWaitNotification: OK eventTxId 1003 >> ydb-tests-functional-scheme_shard::import_test [GOOD] >> ydb-tests-olap-ttl_tiering::import_test [GOOD] |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |78.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part3/ydb-tests-fq-yt-kqp_yt_file-part3 |78.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/import_test >> ydb-tests-olap-ttl_tiering::import_test [GOOD] |78.5%| 
[LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part3/ydb-tests-fq-yt-kqp_yt_file-part3 |78.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/docker_compose |78.6%| [LD] {RESULT} $(B)/library/recipes/docker_compose/docker_compose |78.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/import_test >> ydb-tests-functional-scheme_shard::import_test [GOOD] |78.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |78.6%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/import_test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] |78.6%| [TS] {RESULT} ydb/tests/functional/scheme_shard/import_test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] >> ydb-tests-datashard-dml::import_test [GOOD] |78.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/udfs/common/roaring/test/ydb-library-yql-udfs-common-roaring-test |78.6%| [LD] {RESULT} $(B)/ydb/library/yql/udfs/common/roaring/test/ydb-library-yql-udfs-common-roaring-test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] |78.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/import_test >> ydb-tests-datashard-dml::import_test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::GoodNullLock [GOOD] Test command err: 2025-05-05T03:04:29.572575Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500792173141087269:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:29.572765Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d22/r3tmp/tmpDnDWsg/pdisk_1.dat 2025-05-05T03:04:29.628038Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26348 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:29.658917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T03:04:29.684285Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-05-05T03:04:29.686925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:29.701456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:29.701484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:29.702579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:04:29.750212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:29.760085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:30.034212Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500792177296452934:2059];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:30.034249Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d22/r3tmp/tmpXBNczy/pdisk_1.dat 2025-05-05T03:04:30.063738Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28397 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:30.137453Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:30.137490Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:30.138812Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:04:30.139648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:04:30.141013Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:30.147547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:30.162598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:30.183284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:30.517265Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500792176330481232:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:30.517285Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d22/r3tmp/tmpKaWkRQ/pdisk_1.dat 2025-05-05T03:04:30.538314Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13700 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:30.621313Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:30.621343Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:30.621694Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:04:30.622100Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:04:30.626984Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:04:30.638074Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:04:30.660140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T03:04:30.671000Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:31.014444Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500792181080637563:2057];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:31.014465Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d22/r3tmp/tmpX69ipN/pdisk_1.dat 2025-05-05T03:04:31.039930Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61904 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:31.124978Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:31.125019Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:31.125429Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, subop ... PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:32.659820Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:32.659876Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:32.660252Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:04:32.660895Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:04:32.664494Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:04:32.674423Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:04:32.729695Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:32.738751Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d22/r3tmp/tmpB1zyMa/pdisk_1.dat 2025-05-05T03:04:33.098708Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:04:33.099118Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:62972 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... waiting... 2025-05-05T03:04:33.190351Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:33.190383Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:33.190778Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:04:33.191312Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:04:33.192302Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:04:33.199490Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:04:33.255016Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:33.262982Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:33.592269Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500792189515969930:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:33.592291Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d22/r3tmp/tmpFsXqeC/pdisk_1.dat 2025-05-05T03:04:33.620225Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21678 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:33.696172Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:33.696220Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-05T03:04:33.696702Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:04:33.697324Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:04:33.698018Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T03:04:33.713042Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:04:33.728319Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:33.739363Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:34.085142Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500792192878338738:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:34.085191Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d22/r3tmp/tmpRFHsh6/pdisk_1.dat 2025-05-05T03:04:34.104073Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29974 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:34.190576Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:34.190609Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:34.190979Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:04:34.191464Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:04:34.194679Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:34.200996Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:04:34.202136Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:04:34.262527Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:34.280147Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters |78.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/udfs/common/clickhouse/client/test/library-yql-udfs-common-clickhouse-client-test |78.6%| [LD] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/test/library-yql-udfs-common-clickhouse-client-test |78.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |78.7%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption >> TFlatTest::AutoSplitBySize [GOOD] >> TFlatTest::AutoMergeBySize |78.7%| [TS] {RESULT} ydb/tests/datashard/dml/import_test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters |78.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |78.8%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] |78.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |78.8%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPublic |78.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/nemesis/driver/nemesis |78.8%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/driver/nemesis |78.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part14/ydb-tests-fq-yt-kqp_yt_file-part14 >> TYdbControlPlaneStorageDescribeQuery::ShouldSuccess |78.8%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part14/ydb-tests-fq-yt-kqp_yt_file-part14 |78.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] |78.8%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-plan] |78.8%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/nemesis >> TYdbControlPlaneStorageDescribeQuery::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDescribeQuery::ShouldValidate >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-std] |78.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/udfs/common/knn/test/ydb-library-yql-udfs-common-knn-test |78.8%| [LD] {RESULT} $(B)/ydb/library/yql/udfs/common/knn/test/ydb-library-yql-udfs-common-knn-test |78.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part4/ydb-tests-fq-yt-kqp_yt_file-part4 |78.8%| [LD] {RESULT} 
$(B)/ydb/tests/fq/yt/kqp_yt_file/part4/ydb-tests-fq-yt-kqp_yt_file-part4 |78.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/benchmarks/runner/ydb-library-benchmarks-runner >> TYdbControlPlaneStorageDescribeQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckSuperUser |78.9%| [LD] {RESULT} $(B)/ydb/library/benchmarks/runner/ydb-library-benchmarks-runner |78.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part12/ydb-tests-fq-yt-kqp_yt_file-part12 |78.9%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part12/ydb-tests-fq-yt-kqp_yt_file-part12 |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/benchmarks/report/ut/ydb-library-benchmarks-report-ut |78.9%| [LD] {RESULT} $(B)/ydb/library/benchmarks/report/ut/ydb-library-benchmarks-report-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionEmpty >> test.py::test[blocks-pg_to_dates--ForceBlocks] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-result_sets] |78.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore >> TExportToS3WithRebootsTests::ShouldSucceedAutoDropping [GOOD] >> ydb-tests-datashard-copy_table::import_test [GOOD] |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |78.9%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPublic >> test.py::test[select-cast_double_to_uint32-default.txt-ForceBlocks] |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |78.9%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start |78.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/import_test >> ydb-tests-datashard-copy_table::import_test [GOOD] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldSuccess >> test_recovery.py::TestRecovery::test_delete |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/simple_queue/simple_queue >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPublic [GOOD] |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/example/ydb-tests-example |78.9%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/simple_queue >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivate >> test.py::test[join-cbo_4tables--Results] [SKIPPED] >> test.py::test[join-flatten_columns2--Results] 
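The bracketed suffixes in test names such as test_queue_attributes[tables_format_v1-fifo] are pytest parametrization IDs. A minimal sketch of how such IDs are produced; this is illustrative only and not the repository's actual test code:

    import pytest

    # pytest joins the parameter values with "-" to build IDs like
    # "test_queue_attributes[tables_format_v1-fifo]".
    @pytest.mark.parametrize(
        "tables_format,queue_type",
        [
            ("tables_format_v0", "fifo"),
            ("tables_format_v0", "std"),
            ("tables_format_v1", "fifo"),
            ("tables_format_v1", "std"),
        ],
    )
    def test_queue_attributes(tables_format, queue_type):
        assert queue_type in ("fifo", "std")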
|79.0%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedAutoDropping [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.334527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.334552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.334558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:20.334563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.334569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.334574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.334582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.334595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.334687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.334762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.347549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.347571Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.347664Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.349087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.349119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.349140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.349807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.349844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.349931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.349966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.350355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.350550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.350561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.350603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.350612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.350618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.350640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.352029Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.380680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.380744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.380792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.380846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.380858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.386615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.386647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.386702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.386714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.386721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.386725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.387162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.387176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.387182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.387533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.387546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.387552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.387558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.388270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.388702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.388738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.388917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.388943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.388951Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.389004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.389011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.389037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.389049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.394477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.394490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.394534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.394540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.394551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.394558Z node 1 :FLAT_TX_SCHEMESHARD I ... 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T03:04:40.523852Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T03:04:40.523857Z node 55 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T03:04:40.523862Z node 55 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-05T03:04:40.523868Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:04:40.524222Z node 55 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T03:04:40.524279Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T03:04:40.524284Z node 55 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T03:04:40.524356Z node 55 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T03:04:40.524369Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-05T03:04:40.524384Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2025-05-05T03:04:40.525049Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2025-05-05T03:04:40.525110Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000009 2025-05-05T03:04:40.525701Z node 55 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:40.525726Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 236223203435 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:40.525733Z node 55 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 1004:0, step: 5000009, at schemeshard: 72057594046678944 2025-05-05T03:04:40.525759Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 1004:0, at schemeshard: 72057594046678944 2025-05-05T03:04:40.525767Z node 55 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-05-05T03:04:40.525771Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-05T03:04:40.525776Z node 55 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-05-05T03:04:40.525779Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-05T03:04:40.525787Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:04:40.525795Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-05T03:04:40.525801Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-05-05T03:04:40.525808Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-05T03:04:40.525812Z node 55 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2025-05-05T03:04:40.525815Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2025-05-05T03:04:40.525826Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-05T03:04:40.525831Z node 55 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2025-05-05T03:04:40.525835Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-05-05T03:04:40.525839Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-05-05T03:04:40.526155Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T03:04:40.526351Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T03:04:40.527149Z node 55 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:40.527162Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:40.527263Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-05T03:04:40.527291Z node 55 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:40.527296Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [55:205:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-05-05T03:04:40.527302Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [55:205:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 2 FAKE_COORDINATOR: Erasing txId 1004 2025-05-05T03:04:40.527664Z node 55 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T03:04:40.527678Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T03:04:40.527683Z node 55 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T03:04:40.527688Z node 55 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-05-05T03:04:40.527693Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:04:40.527870Z node 55 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T03:04:40.527881Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T03:04:40.527886Z node 55 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T03:04:40.527890Z node 55 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-05T03:04:40.527895Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-05T03:04:40.527907Z node 55 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-05-05T03:04:40.527965Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:04:40.527970Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-05T03:04:40.527981Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:04:40.529337Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T03:04:40.529808Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T03:04:40.529934Z node 55 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 2025-05-05T03:04:40.530023Z node 55 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-05T03:04:40.530052Z node 55 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 37us result status StatusSuccess 2025-05-05T03:04:40.530134Z node 55 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |79.0%| [TS] {RESULT} ydb/tests/datashard/copy_table/import_test |79.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |79.0%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |79.0%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/simple_queue >> ydb-tests-fq-s3::import_test [GOOD] >> ydb-tests-datashard-dump_restore::import_test [GOOD] |79.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |79.0%| [LD] {RESULT} 
$(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import >> TYdbControlPlaneStorageDeleteConnection::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPermission >> test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-plan] |79.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivatePublic |79.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/import_test >> ydb-tests-fq-s3::import_test [GOOD] |79.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/import_test >> ydb-tests-datashard-dump_restore::import_test [GOOD] |79.1%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |79.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |79.1%| [LD] {RESULT} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |79.1%| [TS] {RESULT} ydb/tests/datashard/dump_restore/import_test |79.1%| [TS] {RESULT} ydb/tests/fq/s3/import_test >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckExist >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewAst >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] |79.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |79.2%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test >> test.py::test[select-cast_double_to_uint32-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-cast_double_to_uint32-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test.py::test[blocks-pg_to_dates--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_to_dates--Results] |79.2%| [LD] {BAZEL_UPLOAD} $(B)/contrib/python/moto/bin/moto_server >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] |79.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:04:27.144796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:27.144823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.144828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:27.144832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:27.144842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:27.144845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:27.144853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.144867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:27.144948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:27.145017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:27.158615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:04:27.158639Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:27.162170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:27.162495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:27.162541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:27.163695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:27.163748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:27.163853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.164046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:27.164906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.165195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.165207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.165224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:27.165231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.165236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:27.165272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.166669Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:04:27.189448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:27.189534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.189624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:27.189690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:27.189701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.190515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.190544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:27.190606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.190626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:27.190632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:27.190638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:27.191119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.191136Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:27.191149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:27.191525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.191536Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.191542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.191549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.192195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send 
propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:27.192560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:27.192597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:27.192774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.192799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:27.192810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.192865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:27.192872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.192900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:27.192911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:27.193318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.193326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.193366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.193372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:27.193439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.193446Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:04:27.193457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.193462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.193468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.193471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.193476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is 
published: false 2025-05-05T03:04:27.193481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.193486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:04:27.193490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:04:27.193501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:04:27.193507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:04:27.193511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:04:27.193837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:04:27.193853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:43.113739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-05-05T03:04:43.113779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:43.114673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-05-05T03:04:43.114728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-05-05T03:04:43.114928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:43.114958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:43.114966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-05-05T03:04:43.115003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-05-05T03:04:43.115033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T03:04:43.118546Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:3453:5417], attempt# 0 2025-05-05T03:04:43.123895Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle 
TEvExportScan::TEvReady: self# [1:3453:5417], sender# [1:3452:5416] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:20687 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5067E442-3F2F-4E29-9209-E84F1C295E74 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-05-05T03:04:43.125782Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3453:5417], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:20687 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 923AA351-6EF6-409A-B19C-9845BB81B1D9 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-05-05T03:04:43.128727Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3453:5417], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-05-05T03:04:43.128876Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3452:5416] 2025-05-05T03:04:43.129152Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3453:5417], sender# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:20687 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9FA74A03-96A3-4946-A546-AEEA117A3671 amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-05-05T03:04:43.130679Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3453:5417], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2025-05-05T03:04:43.132546Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3453:5417], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T03:04:43.132800Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T03:04:43.134598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:43.134619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-05T03:04:43.134709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:43.134716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-05T03:04:43.134853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:43.134868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-05-05T03:04:43.136076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T03:04:43.136124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T03:04:43.136130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-05T03:04:43.136137Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T03:04:43.136144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-05T03:04:43.136172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-05-05T03:04:43.138449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-05T03:04:43.171039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T03:04:43.171062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T03:04:43.171093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T03:04:43.171109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T03:04:43.171123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:43.171130Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:43.171136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 
72057594046678944 2025-05-05T03:04:43.171143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T03:04:43.171204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:43.172191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:43.172322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:43.172332Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T03:04:43.172344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T03:04:43.172348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:43.172352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T03:04:43.172355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:43.172360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T03:04:43.172379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-05-05T03:04:43.172385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:43.172390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T03:04:43.172394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T03:04:43.172434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T03:04:43.173324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:04:43.173336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3438:5403] TestWaitNotification: OK eventTxId 102 |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-result_sets] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldValidate >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:04:27.183115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single 
parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:27.183144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.183149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:27.183154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:27.183165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:27.183170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:27.183179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.183195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:27.183279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:27.183353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:27.206389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:04:27.206414Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:27.217208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:27.220230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:27.220284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:27.227448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:27.227517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:27.227633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.227961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:27.228743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.229048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.229061Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.229083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:27.229089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.229095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:27.229130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-05-05T03:04:27.230563Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:04:27.249028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:27.249111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.249188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:27.249252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:27.249265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.250309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.250347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:27.250423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.250444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:27.250450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:27.250455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:27.250950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.250964Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:27.250969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:27.251302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.251313Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.251320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.251326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.251881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:27.252241Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:27.252287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:27.252485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.252511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:27.252519Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.252575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:27.252582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.252613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:27.252625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:27.253036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.253044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.253096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.253102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:27.253175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.253182Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:04:27.253198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.253202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.253207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.253210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.253215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:04:27.253220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.253225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:04:27.253229Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:04:27.253241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:04:27.253247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:04:27.253251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:04:27.253575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:04:27.253595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... ffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:29778 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6E2239C8-A439-4D29-BA65-CF9A603EDAFC amz-sdk-request: attempt=1 content-length: 130 content-md5: rsyfbQ5vVOk4oQ1A/altew== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=99&uploadId=1 / 130 2025-05-05T03:04:43.249448Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3453:5417], result# UploadPartResult { ETag: aecc9f6d0e6f54e938a10d40fda96d7b } 2025-05-05T03:04:43.249508Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3452:5416] 2025-05-05T03:04:43.249536Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3453:5417], sender# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:29778 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0CC7FCC5-97D2-47BA-B253-C8AF5E011D25 amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2025-05-05T03:04:43.250229Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3453:5417], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2025-05-05T03:04:43.250275Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3452:5416] 2025-05-05T03:04:43.250288Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3453:5417], sender# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:29778 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 81CBA878-F716-4CF5-ABEB-FC7D0B4143F2 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 
Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2025-05-05T03:04:43.250884Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3453:5417], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-05-05T03:04:43.250901Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3453:5417], success# 1, error# , multipart# 1, uploadId# 1 2025-05-05T03:04:43.252468Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3453:5417], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f06202
6b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadfc44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:29778 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8ACFE301-492E-4515-ADD1-9E9457AAD4DB amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-05-05T03:04:43.254332Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3453:5417], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2025-05-05T03:04:43.254413Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T03:04:43.256664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T03:04:43.256684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T03:04:43.256731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T03:04:43.256744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T03:04:43.256758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:43.256762Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:43.256766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T03:04:43.256774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T03:04:43.256987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 
72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:43.257962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:43.258092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:43.258103Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T03:04:43.258117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T03:04:43.258121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:43.258126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T03:04:43.258129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:43.258133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T03:04:43.258163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-05-05T03:04:43.258168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:43.258171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T03:04:43.258174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T03:04:43.258229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T03:04:43.259010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:04:43.259022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3438:5403] TestWaitNotification: OK eventTxId 102 >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewAst [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldNotApplyPermissionViewAstAndViewQueryText |79.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part5/ydb-tests-fq-yt-kqp_yt_file-part5 |79.3%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part5/ydb-tests-fq-yt-kqp_yt_file-part5 >> test.py::test[select-cast_double_to_uint32-default.txt-Results] [GOOD] >> test.py::test[select-qualified_all_and_group_by-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:04:27.238159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:27.238186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.238191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:27.238219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:27.238230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:27.238235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:27.238245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:27.238259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:27.238344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:27.238413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:27.256291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:04:27.256313Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:27.259917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:27.263561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:27.263621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:27.264882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:27.264936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:27.265053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.265317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:27.266144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.266447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.266460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.266481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:27.266489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.266495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:27.266530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.267768Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:04:27.288722Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:27.288791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.288856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:27.288917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:27.288930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.289761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.289789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:27.289846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.289867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:27.289873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:27.289878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:27.290287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.290301Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:27.290306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:27.290643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.290654Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.290659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.290666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.291260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:27.291648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:27.291686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:27.291860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:27.291881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:27.291887Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.291939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:27.291945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:27.291970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:27.291981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:27.292451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:27.292462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:27.292500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:27.292506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:27.292570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:27.292578Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:04:27.292590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.292596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.292601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:27.292604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.292608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:04:27.292613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:27.292618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:04:27.292621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:04:27.292651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2025-05-05T03:04:27.292656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:04:27.292660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:04:27.292974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:04:27.292987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... ksum: } REQUEST: PUT /data_00.csv.zst?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:4477 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3E5B1170-0A26-40BF-AF93-DA145347CE29 amz-sdk-request: attempt=1 content-length: 55 content-md5: Ry5TonSXZhxkAEEJCUBcTg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=99&uploadId=1 / 55 2025-05-05T03:04:43.274478Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3453:5417], result# UploadPartResult { ETag: 472e53a27497661c6400410909405c4e } 2025-05-05T03:04:43.274525Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3452:5416] 2025-05-05T03:04:43.274559Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3453:5417], sender# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:4477 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1DA41142-4621-4F87-A918-996117783CDD amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2025-05-05T03:04:43.275480Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3453:5417], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2025-05-05T03:04:43.275528Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3452:5416] 2025-05-05T03:04:43.275543Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3453:5417], sender# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:4477 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 68A67FCE-9DD9-4786-AA98-4B4CED5EF01E amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2025-05-05T03:04:43.276162Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# 
[1:3453:5417], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-05-05T03:04:43.276175Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3453:5417], success# 1, error# , multipart# 1, uploadId# 1 2025-05-05T03:04:43.283056Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3453:5417], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae25539fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce7
15,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:4477 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F06FF340-E685-4969-997D-BD97EB5285B4 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2025-05-05T03:04:43.285350Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3453:5417], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2025-05-05T03:04:43.285463Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T03:04:43.288353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T03:04:43.288376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T03:04:43.288407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T03:04:43.288421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T03:04:43.288436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:43.288441Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:43.288446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T03:04:43.288454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T03:04:43.288548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:43.290002Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:43.290190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:04:43.290225Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T03:04:43.290245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T03:04:43.290250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:43.290256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T03:04:43.290260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:43.290264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T03:04:43.290311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-05-05T03:04:43.290321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:04:43.290330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T03:04:43.290334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T03:04:43.290385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T03:04:43.292867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:04:43.292888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3438:5403] TestWaitNotification: OK eventTxId 102 >> TYdbControlPlaneStorageDeleteConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckSuperUser ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |79.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/oltp_workload/oltp_workload >> test.py::test[blocks-pg_to_dates--Results] [GOOD] >> test.py::test[blocks-sort_two_mix--ForceBlocks] |79.4%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/oltp_workload |79.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part8/ydb-tests-fq-yt-kqp_yt_file-part8 |79.5%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part8/ydb-tests-fq-yt-kqp_yt_file-part8 |79.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |79.7%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [GOOD] |79.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part16/ydb-tests-fq-yt-kqp_yt_file-part16 >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] >> 
test_invalid_consumer.py::TestConsumer::test_invalid[v1] |79.9%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part16/ydb-tests-fq-yt-kqp_yt_file-part16 |79.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part9/ydb-tests-fq-yt-kqp_yt_file-part9 >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-plan] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldNotApplyPermissionViewAstAndViewQueryText [GOOD] |79.9%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part9/ydb-tests-fq-yt-kqp_yt_file-part9 >> TYdbControlPlaneStorageGetQueryStatus::ShouldSuccess |79.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part15/ydb-tests-fq-yt-kqp_yt_file-part15 |80.0%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part15/ydb-tests-fq-yt-kqp_yt_file-part15 >> test_recovery.py::TestRecovery::test_delete [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckIdempotencyKey |80.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe >> test.py::test[join-flatten_columns2--Results] [GOOD] >> test.py::test[join-inner_trivial_from_concat--Results] >> TYdbControlPlaneStorageGetQueryStatus::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckPermission |80.1%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe >> test.py::test[aggregate-group_by_rollup_udf--Results] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckExist |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionFailed >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-result_sets] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> test.py::test[pg-tpch-q07-default.txt-Results] >> test.py::test[type_v3-insert_struct_v3_with_native--ForceBlocks] >> test.py::test[blocks-sort_two_mix--ForceBlocks] [GOOD] >> test.py::test[blocks-sort_two_mix--Results] >> test.py::test[select-qualified_all_and_group_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-qualified_all_and_group_by-default.txt-Results] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldValidate |80.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/mdb_mock/recipe |80.2%| [LD] {RESULT} $(B)/ydb/tests/tools/mdb_mock/recipe |80.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |80.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionSuccess >> TYdbControlPlaneStorageGetQueryStatus::ShouldValidate [GOOD] >> 
TYdbControlPlaneStorageGetQueryStatus::ShouldCheckSuperUser |80.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part0/ydb-tests-fq-yt-kqp_yt_file-part0 |80.3%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part0/ydb-tests-fq-yt-kqp_yt_file-part0 |80.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part10/ydb-tests-fq-yt-kqp_yt_file-part10 >> TStorageServiceTest::ShouldNotRegisterPrevGeneration >> test.py::test[action-action_eval_cluster_table--Results] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionEmpty >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> test.py::test[blocks-sort_two_mix--Results] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-result_sets] [GOOD] >> test.py::test[column_group-hint-disable-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint-disable-Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail3--ForceBlocks] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-plan] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionEmpty >> test.py::test[column_group-hint_diff_grp_fail3--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail3--Results] [SKIPPED] >> test.py::test[column_group-many_inserts--ForceBlocks] [SKIPPED] >> test.py::test[column_group-many_inserts--Results] [SKIPPED] >> test.py::test[column_order-winfunc-default.txt-ForceBlocks] |80.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part17/ydb-tests-fq-yt-kqp_yt_file-part17 >> TStorageServiceTest::ShouldNotRegisterPrevGeneration [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered |80.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part1/ydb-tests-fq-yt-kqp_yt_file-part1 >> test.py::test[select-qualified_all_and_group_by-default.txt-Results] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types7-all_types7-index7---] |80.3%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part1/ydb-tests-fq-yt-kqp_yt_file-part1 >> test.py::test[select-scalar_subquery-default.txt-ForceBlocks] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivate >> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointTwice >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-std] [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointTwice [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivatePublic |80.3%| [LD] {BAZEL_UPLOAD} $(B)/library/recipes/docker_compose/docker_compose |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |80.3%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common >> test.py::test[join-inner_trivial_from_concat--Results] 
[GOOD] >> test.py::test[join-join_right_cbo--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-result_sets] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/sql/ydb-tests-sql |80.4%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPublic |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/tools/local_ydb/local_ydb |80.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |80.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |80.4%| [LD] {RESULT} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |80.4%| [LD] {RESULT} $(B)/ydb/public/tools/local_ydb/local_ydb |80.4%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |80.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |80.4%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |80.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/clickbench/ydb-tests-functional-clickbench |80.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part2/ydb-tests-fq-yt-kqp_yt_file-part2 |80.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part11/ydb-tests-fq-yt-kqp_yt_file-part11 |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/statistics_workload/statistics_workload |80.5%| [LD] {RESULT} $(B)/ydb/tests/stress/statistics_workload/statistics_workload >> test_invalid_consumer.py::TestConsumer::test_invalid[v1] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] >> test.py::test[action-action_eval_cluster_table--Results] [GOOD] >> test.py::test[action-empty_do-default.txt-Results] |80.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part19/ydb-tests-fq-yt-kqp_yt_file-part19 >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-plan] >> test.py::test[type_v3-insert_struct_v3_with_native--ForceBlocks] [GOOD] >> test.py::test[type_v3-insert_struct_v3_with_native--Results] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> TExportToS3Tests::ShouldCheckQuotasExportsLimited ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] Test command 
err: 2025-05-05T03:04:50.404601Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7500792260895355949:2048] with connection to localhost:61186:local 2025-05-05T03:04:50.404663Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:04:50.596869Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:04:50.596895Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:04:50.597083Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Got TEvRegisterCoordinatorRequest 2025-05-05T03:04:50.613227Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.16] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldNotRegisterPrevGeneration/coordinators_sync, pk: graph_graphich, current generation: 17, expected/new generation: 16, operation: RegisterCheck, code: 400130 2025-05-05T03:04:50.613248Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Send TEvRegisterCoordinatorResponse 2025-05-05T03:04:51.033116Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7500792263886587038:2048] with connection to localhost:61186:local 2025-05-05T03:04:51.033172Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T03:04:51.111108Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointWhenUnregistered/coordinators_sync, pk: graph_graphich, current generation: 0, expected/new generation: 17, operation: Check, code: 400130 2025-05-05T03:04:51.111125Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T03:04:51.477420Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7500792265412379472:2048] with connection to localhost:61186:local 2025-05-05T03:04:51.477496Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:04:51.507201Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:04:51.507225Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:04:51.507378Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T03:04:51.609534Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T03:04:51.609552Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T03:04:51.609696Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T03:04:51.642405Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Error: Constraint violated. Table: `local/TStorageServiceTestShouldNotCreateCheckpointTwice/checkpoints_metadata`., code: 2012
: Error: Conflict with existing key., code: 2012 2025-05-05T03:04:51.642425Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T03:04:51.945335Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7500792268807970931:2048] with connection to localhost:61186:local 2025-05-05T03:04:51.945395Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:04:51.984244Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:04:51.984263Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:04:51.988198Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T03:04:52.046374Z node 4 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-05-05T03:04:52.046391Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T03:04:52.487900Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7500792271481136989:2048] with connection to localhost:61186:local 2025-05-05T03:04:52.487953Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:04:52.517426Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:04:52.517450Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:04:52.517641Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T03:04:52.628015Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T03:04:52.628036Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T03:04:52.628217Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-05-05T03:04:52.660436Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-05-05T03:04:52.660458Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-05-05T03:04:52.660584Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T03:04:52.676605Z node 5 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-05-05T03:04:52.676626Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse >> test.py::test[select-scalar_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-scalar_subquery-default.txt-Results] >> test_recovery.py::TestRecovery::test_ic_disconnection [SKIPPED] |80.6%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/statistics_workload >> TExportToS3Tests::ShouldCheckQuotasExportsLimited [GOOD] >> test.py::test[column_order-winfunc-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-winfunc-default.txt-Results] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivate |80.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue |80.6%| [LD] {RESULT} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 8028 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:00 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:05 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I May04 0:00 [kworker/0:1-cgroup_destroy] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? 
S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I May04 0:00 [kworker/16:0-rcu_gp] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? 
I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 1 ... ts". Create session OK 2025-05-05T03:04:52.110131Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:04:52.110133Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:04:52.110170Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants". 
Create session OK 2025-05-05T03:04:52.110177Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:04:52.110178Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:04:52.110326Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T03:04:52.110335Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:04:52.110336Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:04:52.111099Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs". Create session OK 2025-05-05T03:04:52.111109Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:04:52.111110Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:04:52.111936Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases". 
Create session OK 2025-05-05T03:04:52.111946Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:04:52.111947Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:04:52.128627Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:04:52.128649Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:04:52.150451Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:04:52.150471Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:04:52.169714Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:04:52.169734Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:04:52.171448Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:04:52.171463Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:04:52.171511Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:04:52.171518Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:04:52.171697Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:04:52.171698Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:04:52.171700Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:04:52.171701Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T03:04:52.171769Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:04:52.171771Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:04:52.171791Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:04:52.171794Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:04:52.171829Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:04:52.171831Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:04:52.171846Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:04:52.171847Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T03:04:52.171880Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:04:52.171882Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:04:52.171895Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:04:52.171896Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:04:52.171962Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:04:52.171964Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:04:52.171970Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:04:52.171972Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases": >> test_stop.py::TestStop::test_stop_query[v1-streaming] |80.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited [GOOD] |80.7%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |80.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part7/ydb-tests-fq-yt-kqp_yt_file-part7 |80.7%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part7/ydb-tests-fq-yt-kqp_yt_file-part7 >> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-result_sets] >> test.py::test[select-scalar_subquery-default.txt-Results] [GOOD] >> test.py::test[select-select_all-default.txt-ForceBlocks] |80.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 >> test.py::test[type_v3-insert_struct_v3_with_native--Results] [GOOD] >> test.py::test[type_v3-json--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_udf--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-Results] |80.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part3/ydb-tests-fq-yt-kqp_yt_file-part3 |80.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution >> test.py::test[join-join_right_cbo--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access--Results] >> test.py::test[action-empty_do-default.txt-Results] [GOOD] >> test.py::test[action-eval_folder--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> 
TExportToS3Tests::ShouldCheckQuotasChildrenLimited [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:04:30.503696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:30.503721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:30.503726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:30.503731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:30.503741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:30.503745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:30.503754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:30.503767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:30.503861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:30.503930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:30.516274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:04:30.516299Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:30.519461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:30.519706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:30.519750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:30.520856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:30.520913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:30.520986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:30.521193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:30.521865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:30.522128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:30.522139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:30.522153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-05-05T03:04:30.522158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:30.522162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:30.522188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:04:30.523336Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:04:30.540861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:30.540934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:30.541012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:30.541062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:30.541072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:30.541830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:30.541855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:30.541902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:30.541911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:30.541915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:30.541920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:30.542281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:30.542292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:30.542296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:30.542660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:30.542672Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:30.542678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-05-05T03:04:30.542686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:30.543274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:30.543671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:30.543711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:30.543878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:30.543900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:30.543908Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:30.543967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:30.543974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:30.544006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:30.544019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:30.544510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:30.544524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:30.544564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:30.544570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:30.544636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:30.544643Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:04:30.544655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:04:30.544659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:30.544664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-05-05T03:04:30.544668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:30.544672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:04:30.544678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:04:30.544683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:04:30.544687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:04:30.544700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:04:30.544705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:04:30.544710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:04:30.545030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:04:30.545044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 57594046678944 2025-05-05T03:04:55.681665Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976720762:0, at schemeshard: 72057594046678944 2025-05-05T03:04:55.681675Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720762 ready parts: 1/1 2025-05-05T03:04:55.681700Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976720762 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:55.681860Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T03:04:55.681874Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T03:04:55.681878Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2025-05-05T03:04:55.681883Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-05-05T03:04:55.681888Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:04:55.681931Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T03:04:55.681940Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 
PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T03:04:55.681944Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2025-05-05T03:04:55.681948Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-05-05T03:04:55.681952Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-05T03:04:55.681961Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-05-05T03:04:55.682409Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:04:55.682655Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720762, at schemeshard: 72057594046678944 2025-05-05T03:04:55.682662Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-05-05T03:04:55.682668Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720762, at schemeshard: 72057594046678944 2025-05-05T03:04:55.682751Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720762:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976720762 msg type: 269090816 2025-05-05T03:04:55.682775Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720762, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976720762 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976720762 at step: 5000007 2025-05-05T03:04:55.682835Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-05-05T03:04:55.682891Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:55.682912Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720762 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:55.682918Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976720762:0, step: 5000007, at schemeshard: 72057594046678944 2025-05-05T03:04:55.682939Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976720762:0, at schemeshard: 72057594046678944 2025-05-05T03:04:55.682948Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 2025-05-05T03:04:55.682952Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-05-05T03:04:55.682957Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 2025-05-05T03:04:55.682960Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-05-05T03:04:55.682967Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2025-05-05T03:04:55.682975Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-05T03:04:55.682980Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 1/1, is published: false 2025-05-05T03:04:55.682987Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-05-05T03:04:55.682991Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720762:0 2025-05-05T03:04:55.682994Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720762:0 2025-05-05T03:04:55.683002Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-05T03:04:55.683007Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720762, publications: 2, subscribers: 1 2025-05-05T03:04:55.683011Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-05T03:04:55.683015Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-05-05T03:04:55.683353Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-05-05T03:04:55.683683Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:55.683693Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:55.683720Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-05T03:04:55.683739Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:55.683744Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:336:2312], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 1 2025-05-05T03:04:55.683749Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:336:2312], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976720762 2025-05-05T03:04:55.683882Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T03:04:55.683893Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T03:04:55.683897Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720762 2025-05-05T03:04:55.683901Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-05T03:04:55.683906Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:04:55.683991Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T03:04:55.684000Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T03:04:55.684004Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720762 2025-05-05T03:04:55.684007Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-05T03:04:55.684011Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-05T03:04:55.684018Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720762, subscribers: 1 2025-05-05T03:04:55.684023Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:289:2276] 2025-05-05T03:04:55.684683Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-05-05T03:04:55.684748Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-05-05T03:04:55.684764Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976720762 2025-05-05T03:04:55.684774Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976720762 2025-05-05T03:04:55.684781Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:04:55.684785Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762 2025-05-05T03:04:55.684790Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762, id# 102, itemIdx# 4294967295 2025-05-05T03:04:55.685102Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:04:55.685123Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:04:55.685128Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:704:2644] TestWaitNotification: OK eventTxId 102 >> test.py::test[column_order-winfunc-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_and_join--ForceBlocks] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery.py::TestRecovery::test_ic_disconnection [SKIPPED] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=273572) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_kill_pq_bill.py::TestKillPqBill::test_do_not_bill_pq[v1-mvp_external_ydb_endpoint0]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD]
Test command err:
ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead
  logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(
contrib/python/protobuf/py3/google/protobuf/text_encoding.py:79: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/python/protobuf/py3/google/protobuf/text_encoding.py:79: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
sys:1: ResourceWarning: unclosed
ResourceWarning: Enable tracemalloc to get the object allocation traceback
|80.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part18/ydb-tests-fq-yt-kqp_yt_file-part18
|80.9%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part18/ydb-tests-fq-yt-kqp_yt_file-part18
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] [GOOD]
>> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivate [GOOD]
>> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivatePublic
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std]
|80.9%| [LD] {BAZEL_UPLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so
|80.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/udfs/common/roaring/test/ydb-library-yql-udfs-common-roaring-test
>> test.py::test[pg-tpch-q07-default.txt-Results] [GOOD]
>> test.py::test[pg-tpch-q10-default.txt-Results]
|80.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/udfs/common/clickhouse/client/test/library-yql-udfs-common-clickhouse-client-test
|80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> TFlatTest::AutoMergeBySize [GOOD]
>> TFlatTest::AutoSplitMergeQueue
>> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-result_sets] [GOOD]
>> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-plan]
|80.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/benchmarks/runner/ydb-library-benchmarks-runner
>> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTable [GOOD]
>> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable
|80.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests
|80.9%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests
|81.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/benchmarks/report/ut/ydb-library-benchmarks-report-ut
>> test.py::test[action-eval_folder--Results] [GOOD]
>> test.py::test[action-eval_like--Results]
>> test.py::test[select-select_all-default.txt-ForceBlocks] [GOOD]
>> test.py::test[select-select_all-default.txt-Results]
>> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-result_sets]
>> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata
>> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-plan] [GOOD]
>> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-result_sets]
>> test.py::test[type_v3-json--ForceBlocks] [GOOD]
>> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD]
>> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPermission
>> test.py::test[type_v3-json--Results]
|81.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/solomon_emulator_grpc/solomon_recipe_grpc
|81.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/minidumps/ydb-tests-functional-minidumps
|81.0%| [LD] {RESULT} $(B)/ydb/library/yql/tools/solomon_emulator_grpc/solomon_recipe_grpc
|81.0%| [LD] {RESULT} $(B)/ydb/tests/functional/minidumps/ydb-tests-functional-minidumps
|81.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests
|81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> TExportToS3WithRebootsTests::ForgetShouldSucceedOnViewsAndTables [GOOD]
>> test.py::test[select-select_all-default.txt-Results] [GOOD]
>> test.py::test[select-table_funcs_spec-default.txt-ForceBlocks]
|81.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc
|81.0%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests
|81.0%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc
|81.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part14/ydb-tests-fq-yt-kqp_yt_file-part14
|81.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std
|81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test
>> test.py::test[distinct-distinct_and_join--ForceBlocks] [GOOD]
|81.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD]
Test command err:
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c03/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c03/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'>
  process = subprocess.Popen(
ResourceWarning: Enable tracemalloc to get the object allocation traceback
contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 266250 is still running
ResourceWarning: Enable tracemalloc to get the object allocation traceback
|81.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/sql/large/ydb-tests-sql-large
>> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPermission [GOOD]
>> TYdbControlPlaneStorageDeleteQuery::ShouldCheckExist
>> test.py::test[distinct-distinct_and_join--Results]
|81.1%| [LD] {RESULT}
$(B)/ydb/tests/sql/large/ydb-tests-sql-large >> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams [GOOD] >> TStateStorageTest::ShouldIssueErrorOnNonExistentState >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckIdempotencyKey >> test.py::test[type_v3-json--Results] [GOOD] >> test.py::test[type_v3-mixed_with_columns--ForceBlocks] >> TStateStorageTest::ShouldIssueErrorOnNonExistentState [GOOD] >> TStateStorageTest::ShouldLoadLastSnapshot >> test.py::test[action-eval_like--Results] [GOOD] >> test.py::test[action-eval_regexp--Results] >> TStateStorageTest::ShouldLoadLastSnapshot [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendSnaphotState >> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-plan] >> test.py::test[join-join_without_correlation_and_struct_access--Results] [GOOD] |81.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform >> test.py::test[join-left_join_null_column-off-Results] [SKIPPED] |81.2%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform >> test.py::test[join-left_join_right_pushdown_nested_right--Results] >> TStateStorageTest::ShouldNotGetNonExistendSnaphotState [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnViewsAndTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.206955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.206978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.206984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:20.206989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.206995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.206999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.207008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.207022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.207116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.207185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.220419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.220441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.220535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.222429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.222465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.222489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.223242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.223279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.223373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.223416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.223866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.224089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.224100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.224150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.224158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.224164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.224189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.225658Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.246291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.246366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.246430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.246490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.246501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.247191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.247216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.247273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.247282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.247288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.247293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.247644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.247653Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.247658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.247956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.247964Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.247970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.247977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2025-05-05T03:04:20.248625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.248975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.249011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.249203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.249227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.249233Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.249285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.249292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.249325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.249336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.249736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.249745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.249786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.249792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.249802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.249808Z node 1 :FLAT_TX_SCHEMESHARD I ... 
xId: 281474976710761 2025-05-05T03:04:59.810162Z node 111 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 7 2025-05-05T03:04:59.810165Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-05T03:04:59.810179Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:04:59.810310Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:04:59.810317Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-05T03:04:59.810327Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T03:04:59.810523Z node 111 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:04:59.810552Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:04:59.810556Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:04:59.810562Z node 111 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:04:59.810642Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T03:04:59.810665Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000009 2025-05-05T03:04:59.810731Z node 111 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:59.810748Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 476741372012 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:59.810755Z node 111 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000009, at schemeshard: 72057594046678944 2025-05-05T03:04:59.810775Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T03:04:59.810783Z node 111 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:04:59.810786Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:04:59.810791Z node 111 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:04:59.810794Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 
281474976710761 ready parts: 1/1 2025-05-05T03:04:59.810801Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:04:59.810806Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T03:04:59.810809Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T03:04:59.810813Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:04:59.810816Z node 111 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T03:04:59.810820Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T03:04:59.810824Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T03:04:59.810828Z node 111 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T03:04:59.810830Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-05-05T03:04:59.810832Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-05-05T03:04:59.811222Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:04:59.811310Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-05-05T03:04:59.811317Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-05T03:04:59.811331Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:04:59.811362Z node 111 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:04:59.811415Z node 111 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:59.811421Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:59.811451Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-05T03:04:59.811472Z node 111 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:59.811476Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [111:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T03:04:59.811480Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [111:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 5 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T03:04:59.811591Z node 111 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:04:59.811598Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:04:59.811601Z node 111 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:04:59.811604Z node 111 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-05T03:04:59.811606Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-05T03:04:59.811650Z node 111 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:04:59.811657Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:04:59.811659Z node 111 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:04:59.811661Z node 111 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-05T03:04:59.811663Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T03:04:59.811668Z node 111 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T03:04:59.811671Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [111:123:2149] 2025-05-05T03:04:59.811704Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:04:59.811707Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-05T03:04:59.811713Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:04:59.811998Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:04:59.812190Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:04:59.812205Z node 111 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T03:04:59.812212Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T03:04:59.812217Z node 111 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T03:04:59.812220Z node 111 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T03:04:59.812223Z node 111 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1004, itemIdx# 4294967295 2025-05-05T03:04:59.812257Z node 111 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:04:59.812422Z node 111 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-05T03:04:59.812464Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T03:04:59.812471Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T03:04:59.812526Z node 111 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-05T03:04:59.812539Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T03:04:59.812544Z node 111 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [111:749:2707] TestWaitNotification: OK eventTxId 1004 >> TStateStorageTest::ShouldLoadIncrementSnapshot >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] |81.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/udfs/common/knn/test/ydb-library-yql-udfs-common-knn-test >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD] |81.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part4/ydb-tests-fq-yt-kqp_yt_file-part4 |81.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionFailed >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] >> test.py::test[distinct-distinct_and_join--Results] [GOOD] >> test.py::test[distinct-distinct_by_tuple-default.txt-ForceBlocks] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionFailed [GOOD] |81.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part12/ydb-tests-fq-yt-kqp_yt_file-part12 |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-result_sets] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] >> test.py::test[select-table_funcs_spec-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-table_funcs_spec-default.txt-Results] |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |81.3%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/tests/fq/yt/kqp_yt_file/part6/ydb-tests-fq-yt-kqp_yt_file-part6 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/000914/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk3/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_good_dynconfig/audit.txt 2025-05-05T03:04:53.981985Z: {"sanitized_token":"{none}","subject":"{none}","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> test.py::test[type_v3-mixed_with_columns--ForceBlocks] [GOOD] |81.3%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part6/ydb-tests-fq-yt-kqp_yt_file-part6 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[type_v3-mixed_with_columns--Results] >> test.py::test[action-eval_regexp--Results] [GOOD] >> test.py::test[action-eval_unresolved_type_arg-default.txt-Results] |81.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/token_accessor_mock/recipe |81.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable |81.3%| [LD] {RESULT} $(B)/ydb/tests/tools/token_accessor_mock/recipe >> test.py::test[pg-tpch-q10-default.txt-Results] [GOOD] >> test.py::test[produce-process_lambda_opt_args-default.txt-Results] |81.3%| [LD] {RESULT} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-plan] >> test.py::test[select-table_funcs_spec-default.txt-Results] [GOOD] >> test.py::test[select-type_assert-default.txt-ForceBlocks] |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-result_sets] [GOOD] >> test.py::test[type_v3-mixed_with_columns--Results] [GOOD] >> test.py::test[udf-udaf_lambda-default.txt-ForceBlocks] |81.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/cfg/bin/ydb_configure >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-Results] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-plan] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-Results] |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-std] [GOOD] |81.4%| [LD] {RESULT} 
$(B)/ydb/tools/cfg/bin/ydb_configure ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionFailed [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 8028 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:00 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:05 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I May04 0:00 [kworker/0:1-cgroup_destroy] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? 
I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I May04 0:00 [kworker/16:0-rcu_gp] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? 
I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 1 ... ies" 2025-05-05T03:05:01.846184Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T03:05:01.846227Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:05:01.846237Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:05:01.847224Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2025-05-05T03:05:01.847246Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:05:01.847248Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:05:01.847372Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2025-05-05T03:05:01.847391Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:05:01.847396Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:05:01.847420Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/nodes". 
Create session OK 2025-05-05T03:05:01.847423Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:05:01.847424Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:05:01.847658Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2025-05-05T03:05:01.847672Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:05:01.847674Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:05:01.862844Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:05:01.862869Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:05:01.879486Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:05:01.879504Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:05:01.891731Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:05:01.891753Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:05:01.891809Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:05:01.891824Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:05:01.891978Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:05:01.891987Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:05:01.891996Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:05:01.891999Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:05:01.892115Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:05:01.892120Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:05:01.892123Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:05:01.892124Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:05:01.892185Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:05:01.892193Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:05:01.892243Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:05:01.892252Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:05:01.892252Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:05:01.892254Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T03:05:01.892304Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:05:01.892305Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:05:01.892306Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:05:01.892308Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T03:05:01.892368Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:05:01.892371Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:05:01.894149Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:05:01.894160Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T03:05:02.245196Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:185: Revision of the query has been changed already. Please restart the request with a new revision 2025-05-05T03:05:02.245414Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: DeleteQueryRequest - DeleteQueryResult: {query_id: "utquebkktude5pt2q82o" previous_revision: 100 } ERROR: {
: Error: Revision of the query has been changed already. Please restart the request with a new revision, code: 1003 } |81.4%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/cfg >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-result_sets] >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown >> test.py::test[distinct-distinct_by_tuple-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_by_tuple-default.txt-Results] |81.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |81.4%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-result_sets] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test.py::test[action-eval_unresolved_type_arg-default.txt-Results] [GOOD] >> test.py::test[action-evaluate_pure--Results] |81.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/docs/generator/generator |81.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |81.5%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |81.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache >> test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-result_sets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/python/PyHamcrest/py3/hamcrest/core/base_description.py:43: DeprecationWarning: Call append_description_of instead of append_value |81.5%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache >> test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-plan] >> test.py::test[select-type_assert-default.txt-ForceBlocks] [GOOD] >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store >> test.py::test[select-type_assert-default.txt-Results] |81.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |81.5%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests >> test_actorsystem.py::TestWithHybridNodeWith13Cpu::test >> test.py::test[distinct-distinct_by_tuple-default.txt-Results] [GOOD] >> test.py::test[dq-blacklisted_pragmas1--ForceBlocks] >> test.py::test[produce-process_lambda_opt_args-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_python_as_struct-default.txt-Results] |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[join-left_join_right_pushdown_nested_right--Results] [GOOD] >> 
test.py::test[join-lookupjoin_bug7646_subst--Results] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-with_checkpoints] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/script_params.script-script] >> test.py::test[udf-udaf_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[udf-udaf_lambda-default.txt-Results] >> test.py::test[select-type_assert-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-ForceBlocks] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-result_sets] >> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start_with_filter |81.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part5/ydb-tests-fq-yt-kqp_yt_file-part5 |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo [GOOD] >> TGcTest::ShouldRemovePreviousCheckpoints >> test_public_metrics.py::TestPublicMetrics::test_select_limit[v1] |81.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/mdb_mock/recipe |81.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |81.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo >> test.py::test[udf-udaf_lambda-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt-ForceBlocks] >> test_actorsystem.py::TestWithHybridNodeWith13Cpu::test [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-std] |81.5%| [LD] {RESULT} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb >> test_sql.py::TestCanonicalFolder1::test_case[simple/script_params.script-script] [GOOD] |81.6%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo >> test_sql.py::TestCanonicalFolder1::test_case[table_types.script-script] |81.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |81.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/olap_workload/olap_workload >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-with_checkpoints-mvp_external_ydb_endpoint0] |81.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests >> test.py::test[action-evaluate_pure--Results] [GOOD] >> test.py::test[produce-process_with_python_as_struct-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf_validate-default.txt-Results] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-Results] |81.6%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/olap_workload >> TGcTest::ShouldRemovePreviousCheckpoints [GOOD] >> TGcTest::ShouldIgnoreIncrementCheckpoint |81.6%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |81.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |81.6%| [LD] {RESULT} 
$(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] |81.6%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql >> test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-result_sets] [GOOD] |81.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part16/ydb-tests-fq-yt-kqp_yt_file-part16 >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-plan] >> test.py::test[dq-blacklisted_pragmas1--ForceBlocks] [GOOD] >> test.py::test[dq-blacklisted_pragmas1--Results] |81.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part8/ydb-tests-fq-yt-kqp_yt_file-part8 >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-fifo] >> test.py::test[dq-blacklisted_pragmas1--Results] [SKIPPED] >> test.py::test[hor_join-group_ranges--ForceBlocks] |81.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part9/ydb-tests-fq-yt-kqp_yt_file-part9 |81.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |81.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part15/ydb-tests-fq-yt-kqp_yt_file-part15 >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-result_sets] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTableWithChangefeed [GOOD] |81.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/benchmarks/template/ut/ydb-library-benchmarks-template-ut |81.7%| [LD] {RESULT} $(B)/ydb/library/benchmarks/template/ut/ydb-library-benchmarks-template-ut |81.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yt/kqp_yt_file/part13/ydb-tests-fq-yt-kqp_yt_file-part13 |81.7%| [LD] {RESULT} $(B)/ydb/tests/fq/yt/kqp_yt_file/part13/ydb-tests-fq-yt-kqp_yt_file-part13 |81.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename >> test_sql.py::TestCanonicalFolder1::test_case[table_types.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-plan] >> TGcTest::ShouldIgnoreIncrementCheckpoint [GOOD] >> TStateStorageTest::ShouldCountStates >> TStateStorageTest::ShouldCountStates [GOOD] >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-result_sets] |81.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-ForceBlocks] [GOOD] |81.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Results] >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-plan] >> test_disposition.py::TestContinueMode::test_disposition_from_time[v1-mvp_external_ydb_endpoint0] >> test.py::test[union_all-union_all_fields-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[union_all-union_all_fields-default.txt-Results] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bitand-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTableWithChangefeed [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.100810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.100835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.100841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:20.100846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.100852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.100855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.100865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.100880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.100972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.101048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.134916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" 
AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.135153Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.135237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.155558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.155606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.155637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.156371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.156409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.156509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.156552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.156909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.157136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.157147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.157192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.157199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.157206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.157229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.170520Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.189354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.189430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.189492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.189574Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.189586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.194587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.194630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.194710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.194741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.194748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.194753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.195424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.195438Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.195443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.195902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.195914Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.195920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.195928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.196587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.196969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.197009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.197195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.197219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 
128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.197227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.197282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.197289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.197319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.197332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.197730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.197739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.197781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.197787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.197797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.197803Z node 1 :FLAT_TX_SCHEMESHARD I ... 
5T03:05:11.904375Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:05:11.904462Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-05T03:05:11.904517Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-05-05T03:05:11.905193Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:05:11.905206Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-05-05T03:05:11.905221Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-05T03:05:11.905850Z node 121 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:05:11.905905Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:05:11.905911Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:05:11.905917Z node 121 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:05:11.906014Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T03:05:11.906045Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000009 2025-05-05T03:05:11.906263Z node 121 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:05:11.906294Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 519691044971 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:05:11.906303Z node 121 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000009, at schemeshard: 72057594046678944 2025-05-05T03:05:11.906338Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T03:05:11.906348Z node 121 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:05:11.906352Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:05:11.906357Z node 121 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:05:11.906360Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 
281474976710761 ready parts: 1/1 2025-05-05T03:05:11.906374Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:05:11.906387Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-05T03:05:11.906393Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T03:05:11.906399Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:05:11.906403Z node 121 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T03:05:11.906407Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T03:05:11.906417Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-05T03:05:11.906424Z node 121 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T03:05:11.906427Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-05-05T03:05:11.906430Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2025-05-05T03:05:11.907899Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:11.907935Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:11.908523Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-05-05T03:05:11.908542Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-05T03:05:11.908563Z node 121 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:05:11.908932Z node 121 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:05:11.908943Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:05:11.908987Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-05-05T03:05:11.909015Z node 121 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:05:11.909021Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [121:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T03:05:11.909027Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [121:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 6 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T03:05:11.909218Z node 121 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:11.909231Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:11.909236Z node 121 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:05:11.909242Z node 121 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-05T03:05:11.909247Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:05:11.909376Z node 121 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:11.909385Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:11.909389Z node 121 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:05:11.909393Z node 121 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-05-05T03:05:11.909397Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-05T03:05:11.909408Z node 121 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T03:05:11.909416Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [121:125:2151] 2025-05-05T03:05:11.909461Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:05:11.909466Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-05T03:05:11.909478Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:05:11.909997Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:11.910283Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:11.910301Z node 121 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T03:05:11.910311Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T03:05:11.910320Z node 121 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T03:05:11.910323Z node 121 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T03:05:11.910327Z node 121 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1004, itemIdx# 4294967295 2025-05-05T03:05:11.910376Z node 121 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:05:11.910644Z node 121 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-05T03:05:11.910691Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T03:05:11.910697Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T03:05:11.910761Z node 121 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-05T03:05:11.910777Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T03:05:11.910782Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [121:979:2881] TestWaitNotification: OK eventTxId 1004 >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] >> TFlatTest::AutoSplitMergeQueue [GOOD] >> test.py::test[produce-process_with_udf_validate-default.txt-Results] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-plan] [GOOD] >> test.py::test[produce-reduce_with_python_presort--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_row--Results] [SKIPPED] >> test.py::test[ql_filter-integer_optional_null--Results] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-result_sets] |81.8%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/olap_workload >> test.py::test[join-lookupjoin_bug7646_subst--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o--Results] |81.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part0/ydb-tests-fq-yt-kqp_yt_file-part0 |81.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/simple_queue/simple_queue >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-plan] >> test_kill_pq_bill.py::TestKillPqBill::test_do_not_bill_pq[v1-mvp_external_ydb_endpoint0] [GOOD] |81.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part1/ydb-tests-fq-yt-kqp_yt_file-part1 >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-result_sets] >> test.py::test[union_all-union_all_fields-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_num_access--ForceBlocks] |81.9%| [BN] {BAZEL_UPLOAD} $(B)/ydb/tests/stability/tool/simple_queue ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] Test command err: 2025-05-05T03:05:10.014483Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [1:36:2083] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 
2025-05-05T03:05:10.056162Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2025-05-05T03:05:10.147798Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph' up to 11:3 Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2025-05-05T03:05:11.223982Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [2:36:2083] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/ShouldIgnoreIncrementCheckpoint"); SELECT * FROM checkpoints_graphs_description; 2025-05-05T03:05:11.273616Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2025-05-05T03:05:11.273648Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC skip increment checkpoint for graph 'graph' >> test.py::test[hor_join-group_ranges--ForceBlocks] [GOOD] >> test.py::test[hor_join-group_ranges--Results] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-with_checkpoints] [GOOD] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_session_only_distinct--Results] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-plan] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Results] [GOOD] >> test.py::test[stream_lookup_join-lookup_join-default.txt-ForceBlocks] >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTable [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith14Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-result_sets] >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> test_public_metrics.py::TestPublicMetrics::test_select_limit[v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-plan] |81.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::AutoSplitMergeQueue [GOOD] Test command err: 2025-05-05T03:04:30.556719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500792178269638391:2207];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:04:30.556814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d17/r3tmp/tmp71oo34/pdisk_1.dat 2025-05-05T03:04:30.629179Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6566 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: 2025-05-05T03:04:30.661164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:04:30.661197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:04:30.663352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:04:30.670113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:30.673843Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:04:30.679396Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-05-05T03:04:30.684047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1746414270783 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "String" TypeId: 4097 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) A-0 B-0 2025-05-05T03:04:30.955262Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.9, eph 1} end=0, 2 blobs 1r (max 1), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-05T03:04:30.956757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-05-05T03:04:30.958654Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 2 blobs 1r (max 1), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-05T03:04:30.959282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-05-05T03:04:31.057761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-05-05T03:04:31.057822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 6291502 row count 1 2025-05-05T03:04:31.057851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 1, DataSize 6291502 2025-05-05T03:04:31.057902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037888 2025-05-05T03:04:31.058409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-1 2025-05-05T03:04:31.151421Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.14, eph 2} end=0, 2 blobs 1r (max 1), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-05T03:04:31.152707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2025-05-05T03:04:31.162825Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 3 blobs 2r (max 2), put Spent{time=0.010s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (12583142 0 0)b }, ecr=1.000 2025-05-05T03:04:31.163692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583020 rowCount 2 cpuUsage 0 2025-05-05T03:04:31.253234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-05-05T03:04:31.253291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 12583020 row count 2 2025-05-05T03:04:31.253316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 2, DataSize 12583020 2025-05-05T03:04:31.253351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037888 
2025-05-05T03:04:31.253393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 B-1 2025-05-05T03:04:31.313933Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.19, eph 3} end=0, 2 blobs 1r (max 1), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-05T03:04:31.318241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 18874522 rowCount 3 cpuUsage 0 2025-05-05T03:04:31.329587Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 3r (max 3), put Spent{time=0.014s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (18874688 0 0)b }, ecr=1.000 2025-05-05T03:04:31.330162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 18874538 rowCount 3 cpuUsage 0 2025-05-05T03:04:31.418260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-05-05T03:04:31.418305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 18874538 row count 3 2025-05-05T03:04:31.418332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 3, DataSize 18874538 2025-05-05T03:04:31.418364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037888 2025-05-05T03:04:31.418759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-2 2025-05-05T03:04:31.454853Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 4} end=0, 2 blobs 1r (max 1), put Spent{time=0.003s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-05T03:04:31.455961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 25166040 rowCount 4 cpuUsage 0 2025-05-05T03:04:31.493803Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 4} end=0, 5 blobs 4r (max 4), put Spent{time=0.038s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (25166234 0 0)b }, ecr=1.000 2025-05-05T03:04:31.499923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 25166056 rowCount 4 cpuUsage 0 2025-05-05T03:04:31.558431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-05-05T03:04:31.558479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 25166056 row count 4 2025-05-05T03:04:31.558502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 4, DataSize 25166056 
2025-05-05T03:04:31.558537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037888 2025-05-05T03:04:31.559659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 B-2 2025-05-05T03:04:31.611808Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph 5} end=0, 2 blobs 1r (max 1), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-05T03:04:31.617635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 31457558 rowCount 5 cpuUsage 0 2025-05-05T03:04:31.668308Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.31, eph 5} end=0, 5 blobs 5r (max 5), put Spent{time=0.054s,wait=0.001s,interrup ... 057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746414298069 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 6 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumn... (TRUNCATED) WriteKVRow: 251 EraseKVRow: 221 WriteKVRow: 252 EraseKVRow: 222 WriteKVRow: 253 EraseKVRow: 223 WriteKVRow: 254 EraseKVRow: 224 WriteKVRow: 255 EraseKVRow: 225 WriteKVRow: 256 EraseKVRow: 226 WriteKVRow: 257 EraseKVRow: 227 WriteKVRow: 258 EraseKVRow: 228 WriteKVRow: 259 EraseKVRow: 229 WriteKVRow: 260 EraseKVRow: 230 WriteKVRow: 261 EraseKVRow: 231 WriteKVRow: 262 EraseKVRow: 232 WriteKVRow: 263 EraseKVRow: 233 WriteKVRow: 264 EraseKVRow: 234 WriteKVRow: 265 EraseKVRow: 235 WriteKVRow: 266 EraseKVRow: 236 WriteKVRow: 267 EraseKVRow: 237 WriteKVRow: 268 EraseKVRow: 238 WriteKVRow: 269 EraseKVRow: 239 WriteKVRow: 270 EraseKVRow: 240 WriteKVRow: 271 EraseKVRow: 241 WriteKVRow: 272 EraseKVRow: 242 WriteKVRow: 273 EraseKVRow: 243 WriteKVRow: 274 EraseKVRow: 244 WriteKVRow: 275 EraseKVRow: 245 WriteKVRow: 276 EraseKVRow: 246 WriteKVRow: 277 EraseKVRow: 247 WriteKVRow: 278 EraseKVRow: 248 WriteKVRow: 279 EraseKVRow: 249 WriteKVRow: 280 EraseKVRow: 250 WriteKVRow: 281 EraseKVRow: 251 WriteKVRow: 282 EraseKVRow: 252 WriteKVRow: 283 EraseKVRow: 253 WriteKVRow: 284 EraseKVRow: 254 WriteKVRow: 285 EraseKVRow: 255 WriteKVRow: 286 EraseKVRow: 256 WriteKVRow: 287 EraseKVRow: 257 WriteKVRow: 288 EraseKVRow: 258 WriteKVRow: 289 EraseKVRow: 259 WriteKVRow: 290 EraseKVRow: 260 WriteKVRow: 291 EraseKVRow: 261 WriteKVRow: 292 EraseKVRow: 262 WriteKVRow: 293 EraseKVRow: 263 WriteKVRow: 294 EraseKVRow: 264 WriteKVRow: 295 EraseKVRow: 265 WriteKVRow: 296 EraseKVRow: 266 WriteKVRow: 297 EraseKVRow: 267 WriteKVRow: 298 EraseKVRow: 268 WriteKVRow: 299 EraseKVRow: 269 TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746414298069 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 6 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumn... (TRUNCATED) EraseKVRow: 270 EraseKVRow: 271 EraseKVRow: 272 EraseKVRow: 273 EraseKVRow: 274 EraseKVRow: 275 EraseKVRow: 276 EraseKVRow: 277 EraseKVRow: 278 EraseKVRow: 279 EraseKVRow: 280 EraseKVRow: 281 EraseKVRow: 282 EraseKVRow: 283 EraseKVRow: 284 EraseKVRow: 285 EraseKVRow: 286 EraseKVRow: 287 EraseKVRow: 288 EraseKVRow: 289 EraseKVRow: 290 EraseKVRow: 291 EraseKVRow: 292 EraseKVRow: 293 EraseKVRow: 294 EraseKVRow: 295 EraseKVRow: 296 EraseKVRow: 297 EraseKVRow: 298 EraseKVRow: 299 TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746414298069 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 6 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumn... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746414298069 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 6 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumn... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746414298069 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 6 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumn... 
(TRUNCATED) 2025-05-05T03:05:12.868841Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-05-05T03:05:12.868861Z node 3 :IMPORT WARN: Table profiles were not loaded TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746414298069 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 6 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumn... (TRUNCATED) 2025-05-05T03:05:14.469088Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-05-05T03:05:14.469104Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-05-05T03:05:14.470973Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-05-05T03:05:14.470987Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-05-05T03:05:14.470990Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-05-05T03:05:14.563286Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037898 not found 2025-05-05T03:05:14.563300Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037900 not found 2025-05-05T03:05:14.576233Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037899 not found 2025-05-05T03:05:14.576248Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037901 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746414298069 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 10 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746414298069 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 10 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test.py::test[hor_join-group_ranges--Results] [GOOD] >> test.py::test[hor_join-skip_yamr--ForceBlocks] >> test.py::test[ql_filter-integer_optional_null--Results] [GOOD] >> test.py::test[sampling-bind_default-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-result_sets] |81.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/tests-datasource-postgresql ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/00090b/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk9/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs1/audit.txt 2025-05-05T03:05:08.759033Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:05:08.759014Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-05-05T03:05:08.706787Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} |81.9%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/tests-datasource-postgresql ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is 
[1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.533733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.533756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.533761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:20.533766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.533771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.533774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.533783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.533796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.533887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.533958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.546414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.546432Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.546530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.548109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.548142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.548166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.549058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.549115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.549254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.549312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot 
DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.549809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.549988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.549997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.550036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.550044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.550049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.550069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.551377Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.570522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.570587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.570635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.570690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.570701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.571315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.571336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.571376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.571385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.571390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.571395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 
2 -> 3 2025-05-05T03:04:20.571703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.571711Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.571715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.571976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.571984Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.571990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.571996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.572614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.572907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.572934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.573093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.573114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.573120Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.573169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.573175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.573199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.573210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.573529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.573537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.573587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.573593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.573602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.573608Z node 1 :FLAT_TX_SCHEMESHARD I ... 5T03:05:16.766754Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:05:16.767601Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-05T03:05:16.767663Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T03:05:16.767864Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:05:16.767872Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-05T03:05:16.767886Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:05:16.768492Z node 157 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:05:16.768588Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T03:05:16.768623Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-05-05T03:05:16.768719Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:05:16.768726Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:05:16.768732Z node 157 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2025-05-05T03:05:16.768868Z node 157 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:05:16.768900Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 674309867628 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:05:16.768910Z node 157 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2025-05-05T03:05:16.768946Z node 157 
:FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T03:05:16.768955Z node 157 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:05:16.768960Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:05:16.768965Z node 157 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:05:16.768968Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:05:16.768981Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:05:16.768994Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:05:16.768999Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T03:05:16.769007Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:05:16.769011Z node 157 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T03:05:16.769015Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T03:05:16.769027Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:05:16.769033Z node 157 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T03:05:16.769037Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-05-05T03:05:16.769040Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T03:05:16.769183Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:16.769976Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:16.770055Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-05-05T03:05:16.770070Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-05T03:05:16.770087Z node 157 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:05:16.770535Z node 157 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:05:16.770551Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:05:16.770618Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T03:05:16.770649Z node 157 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:05:16.770655Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [157:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T03:05:16.770660Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [157:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T03:05:16.770833Z node 157 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:16.770849Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:16.770854Z node 157 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:05:16.770859Z node 157 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-05T03:05:16.770864Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:05:16.771029Z node 157 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:16.771039Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:16.771043Z node 157 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:05:16.771047Z node 157 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T03:05:16.771051Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:05:16.771062Z node 157 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T03:05:16.771067Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [157:123:2149] 2025-05-05T03:05:16.771218Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:05:16.771230Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T03:05:16.771243Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:05:16.772176Z node 157 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:16.772355Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:16.772559Z node 157 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T03:05:16.772594Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T03:05:16.772606Z node 157 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:05:16.772611Z node 157 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T03:05:16.772637Z node 157 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1003, itemIdx# 4294967295 2025-05-05T03:05:16.772885Z node 157 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:05:16.773303Z node 157 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-05T03:05:16.773383Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-05T03:05:16.773391Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-05T03:05:16.773466Z node 157 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-05T03:05:16.773483Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-05T03:05:16.773488Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [157:724:2683] TestWaitNotification: OK eventTxId 1003 >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-plan] [GOOD] |81.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/tests-datasource-clickhouse |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-result_sets] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-without_checkpoints] |81.9%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/tests-datasource-clickhouse |81.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/ydb_serializable/replay/replay >> test.py::test[weak_field-weak_field_num_access--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_num_access--Results] |82.0%| [LD] {RESULT} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |82.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/kqprun/kqprun |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |82.0%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun >> test_actorsystem.py::TestWithHybridNodeWith14Cpu::test [GOOD] >> test_generator.py::TestTpchGenerator::test_s1_parts [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] 
|82.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |82.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |82.0%| [LD] {RESULT} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut >> test.py::test[join-lookupjoin_inner_1o--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o-off-Results] >> test.py::test[join-lookupjoin_inner_1o2o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o--Results] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-plan] >> test_public_metrics.py::TestPublicMetrics::test_select_unlimited[v1-STATS_MODE_FULL] |82.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds >> test.py::test[stream_lookup_join-lookup_join-default.txt-ForceBlocks] [GOOD] >> test.py::test[stream_lookup_join-lookup_join-default.txt-Results] [SKIPPED] >> test.py::test[table_range-concat_sorted_max_tables--ForceBlocks] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-plan] [GOOD] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1_parts [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-result_sets] |82.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |82.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |82.1%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming >> test.py::test[hor_join-skip_yamr--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_num_access--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part--ForceBlocks] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-plan] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_kill_pq_bill.py::TestKillPqBill::test_do_not_bill_pq[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=283917) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:05:08] send response localhost:15435/?database=local ::1 - - [05/May/2025 03:05:08] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000e9e/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_kill_pq_bill.py.TestKillPqBill.test_do_not_bill_pq.v1-mvp_external_ydb_endpoint0/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback |82.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part7/ydb-tests-fq-yt-kqp_yt_file-part7 >> test.py::test[aggr_factory-bitand-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bottom-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-plan] >> test.py::test[sampling-bind_default-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_multiple_sample-default.txt-Results] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] |82.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas >> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTables [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-result_sets] |82.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |82.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |82.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part18/ydb-tests-fq-yt-kqp_yt_file-part18 |82.3%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |82.3%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-plan] |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] |82.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |82.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[hor_join-skip_yamr--ForceBlocks] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types13-all_types13-index13-Date--] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-result_sets] |82.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/api/ydb-tests-functional-api >> 
test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store >> test.py::test[aggregate-group_by_session_only_distinct--Results] [GOOD] >> test.py::test[aggregate-histogram_cdf-default.txt-Results] |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-result_sets] [GOOD] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-with_checkpoints-mvp_external_ydb_endpoint0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-plan] |82.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.743964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.743983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.743987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:20.743990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.743994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.743996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.744003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.744014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.744102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.744174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.754454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.754473Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.754543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.755811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.755835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.755855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.756533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.756574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.756658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.756684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.756994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.757144Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.757150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.757190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.757195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.757199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.757216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is 
[1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.758149Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.771784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.771840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.771884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.771935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.771944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.772455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.772474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.772508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.772514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.772517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.772520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.772817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.772826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.772828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.773082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.773090Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.773093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.773097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.773470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2025-05-05T03:04:20.773797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.773823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.773947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.773966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.773971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.774008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.774012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.774028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.774035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.774352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.774358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.774380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.774383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.774390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.774394Z node 1 :FLAT_TX_SCHEMESHARD I ... 
xId: 281474976710761 2025-05-05T03:05:22.751976Z node 158 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 7 2025-05-05T03:05:22.751982Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-05-05T03:05:22.751998Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:05:22.752076Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:05:22.752082Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-05T03:05:22.752097Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T03:05:22.752340Z node 158 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:05:22.752421Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:05:22.752427Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:05:22.752432Z node 158 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:05:22.752726Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T03:05:22.752750Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000009 2025-05-05T03:05:22.753172Z node 158 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:05:22.753196Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 122 RawX2: 678604834916 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:05:22.753217Z node 158 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000009, at schemeshard: 72057594046678944 2025-05-05T03:05:22.753242Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T03:05:22.753251Z node 158 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:05:22.753256Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:05:22.753261Z node 158 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:05:22.753264Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 
281474976710761 ready parts: 1/1 2025-05-05T03:05:22.753272Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:05:22.753283Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T03:05:22.753288Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T03:05:22.753295Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:05:22.753298Z node 158 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T03:05:22.753301Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T03:05:22.753312Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T03:05:22.753318Z node 158 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T03:05:22.753322Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-05-05T03:05:22.753328Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-05-05T03:05:22.753507Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:22.753531Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-05-05T03:05:22.753539Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-05T03:05:22.754339Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:22.754389Z node 158 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:05:22.754745Z node 158 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:05:22.754768Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:05:22.754805Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-05T03:05:22.754830Z node 158 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:05:22.754848Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [158:204:2206], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T03:05:22.754853Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [158:204:2206], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 5 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T03:05:22.755001Z node 158 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:22.755014Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:22.755019Z node 158 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:05:22.755024Z node 158 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-05T03:05:22.755028Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-05T03:05:22.755118Z node 158 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:22.755128Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:22.755132Z node 158 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:05:22.755136Z node 158 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-05T03:05:22.755143Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T03:05:22.755154Z node 158 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T03:05:22.755159Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [158:129:2154] 2025-05-05T03:05:22.755224Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:05:22.755231Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-05T03:05:22.755243Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:05:22.757017Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:22.757433Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:22.757459Z node 158 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T03:05:22.757473Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T03:05:22.757483Z node 158 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T03:05:22.757487Z node 158 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T03:05:22.757492Z node 158 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1004, itemIdx# 4294967295 2025-05-05T03:05:22.757557Z node 158 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:05:22.757996Z node 158 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-05T03:05:22.758058Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T03:05:22.758066Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T03:05:22.758139Z node 158 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-05T03:05:22.758155Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T03:05:22.758160Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [158:747:2705] TestWaitNotification: OK eventTxId 1004 >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-result_sets] >> test.py::test[table_range-concat_sorted_max_tables--ForceBlocks] [GOOD] >> test.py::test[table_range-concat_sorted_max_tables--Results] >> test.py::test[join-lookupjoin_inner_2o--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o--Results] |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-without_checkpoints-mvp_external_ydb_endpoint0] >> test_delete_read_rules_after_abort_by_system.py::TestDeleteReadRulesAfterAbortBySystem::test_delete_read_rules_after_abort_by_system ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types7-all_types7-index7---] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000bfd/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000bfd/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 293469 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_actorsystem.py::TestWithHybridNodeWith15Cpu::test |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ydb/connector-tests-datasource-ydb |82.4%| [LD] {RESULT} 
$(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ydb/connector-tests-datasource-ydb >> test_disposition.py::TestContinueMode::test_disposition_from_time[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-plan] |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> TStateStorageTest::ShouldDeleteNoCheckpoints >> test.py::test[window-win_func_aggr_4func_no_part--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part--Results] >> test_auditlog.py::test_single_dml_query_logged[upsert] >> TStateStorageTest::ShouldDeleteNoCheckpoints [GOOD] >> TStateStorageTest::ShouldDeleteNoCheckpoints2 |82.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common >> test.py::test[table_range-concat_sorted_max_tables--Results] [GOOD] >> test.py::test[table_range-range_with_view--ForceBlocks] >> TStateStorageTest::ShouldDeleteNoCheckpoints2 [GOOD] >> TStateStorageTest::ShouldDeleteCheckpoints >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-result_sets] |82.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> TStateStorageTest::ShouldDeleteCheckpoints [GOOD] >> TStateStorageTest::ShouldDeleteGraph |82.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part6/ydb-tests-fq-yt-kqp_yt_file-part6 |82.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut >> TStateStorageTest::ShouldDeleteGraph [GOOD] >> TStateStorageTest::ShouldGetMultipleStates |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut >> test.py::test[sampling-bind_multiple_sample-default.txt-Results] [GOOD] >> test.py::test[schema-copy-read_schema-Results] |82.5%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-result_sets] >> TStateStorageTest::ShouldGetMultipleStates [GOOD] |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TFlatTest::SplitEmptyAndWrite |82.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |82.5%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |82.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/datasource-ms_sql_server >> test_actorsystem.py::TestWithHybridNodeWith15Cpu::test [GOOD] |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.5%| [LD] {BAZEL_UPLOAD} 
$(B)/ydb/library/yql/tools/solomon_emulator_grpc/solomon_recipe_grpc >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-plan] [GOOD] >> TFlatTest::SplitEmptyAndWrite [GOOD] >> TFlatTest::SplitBoundaryRead >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-result_sets] |82.5%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/datasource-ms_sql_server |82.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/token_accessor_mock/recipe >> test_disposition.py::TestContinueMode::test_disposition_oldest[v1-mvp_external_ydb_endpoint0] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok |82.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/connector/tests/join/yql-providers-generic-connector-tests-join |82.6%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/connector/tests/join/yql-providers-generic-connector-tests-join >> TFlatTest::SplitBoundaryRead [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part--Results] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names--ForceBlocks] |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] |82.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-result_sets] |82.6%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-plan] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-without_checkpoints] [GOOD] |82.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/benchmarks/template/ut/ydb-library-benchmarks-template-ut |82.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/mysql/connector-tests-datasource-mysql >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-plan] |82.6%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/mysql/connector-tests-datasource-mysql |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldGetMultipleStates [GOOD] |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |82.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-simple] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-plan] |82.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-result_sets] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-plan] >> test_generator.py::TestTpchGenerator::test_s1_state [GOOD] >> test.py::test[join-lookupjoin_semi_1o--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_empty-off-Results] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SplitBoundaryRead [GOOD] Test command err: 2025-05-05T03:05:29.644565Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500792429059294923:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:05:29.644617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d11/r3tmp/tmpCPs3lo/pdisk_1.dat 2025-05-05T03:05:29.702833Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20046 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:05:29.741194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:05:29.769289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:05:29.778617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:05:29.778646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:05:29.779715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:05:29.838396Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T03:05:29.839162Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T03:05:29.844011Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T03:05:29.844739Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1746414329870 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
2025-05-05T03:05:29.867919Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.22, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T03:05:29.867933Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.23, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T03:05:29.867983Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T03:05:29.867990Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T03:05:29.868178Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 3} end=0, 4 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (1907 1533 0)b }, ecr=1.000 2025-05-05T03:05:29.869106Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.33, eph 3} end=0, 4 blobs 8r (max 8), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1746414329870 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-05-05T03:05:29.897534Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-05-05T03:05:29.897549Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-05-05T03:05:29.897903Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-05-05T03:05:29.897935Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-05-05T03:05:29.897937Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-05-05T03:05:30.236981Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500792433484073629:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:05:30.237050Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d11/r3tmp/tmp0flpnv/pdisk_1.dat 2025-05-05T03:05:30.253807Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29313 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:05:30.341268Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:05:30.341300Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:05:30.341578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:05:30.342322Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:05:30.342921Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:05:30.378735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:05:30.398720Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T03:05:30.402502Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T03:05:30.403627Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T03:05:30.406278Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld 2025-05-05T03:05:30.418037Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 9r (max 9), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3307 2180 6413)b }, ecr=1.000 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxI ... ode 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-05T03:05:30.435327Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-05T03:05:30.435343Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037890 splitOp: 281474976715678:0 alterVersion: 1 at tablet: 72057594046644480 2025-05-05T03:05:30.435371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037891 splitOp: 281474976715678:0 alterVersion: 1 at tablet: 72057594046644480 2025-05-05T03:05:30.435446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037890 cookie: 72057594046644480:3 msg type: 269553152 2025-05-05T03:05:30.435500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037891 cookie: 72057594046644480:4 msg type: 269553152 2025-05-05T03:05:30.435521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037890 2025-05-05T03:05:30.435523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037891 2025-05-05T03:05:30.438907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037890 2025-05-05T03:05:30.438923Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 
281474976715678 TabletId: 72075186224037890 2025-05-05T03:05:30.439042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-05T03:05:30.439332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037891 2025-05-05T03:05:30.439341Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037891 2025-05-05T03:05:30.439346Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715678:0 3 -> 131 2025-05-05T03:05:30.439420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-05T03:05:30.439443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-05T03:05:30.439446Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2025-05-05T03:05:30.439450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TSplitMerge TTransferData operationId# 281474976715678:0 Starting split on src datashard 72075186224037888 splitOpId# 281474976715678:0 at tablet 72057594046644480 2025-05-05T03:05:30.439501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553154 2025-05-05T03:05:30.439551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037888 2025-05-05T03:05:30.440846Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T03:05:30.440869Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T03:05:30.440920Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T03:05:30.440950Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.28, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T03:05:30.440996Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph -9223372036854775808} end=0, 0 blobs 0r (max 0), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T03:05:30.442564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-05-05T03:05:30.442581Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715678:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-05-05T03:05:30.442704Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715678:0 131 -> 132 2025-05-05T03:05:30.442738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 
3] was 6 2025-05-05T03:05:30.442826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-05T03:05:30.442856Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-05T03:05:30.442859Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715678, path id: [OwnerId: 72057594046644480, LocalPathId: 3] 2025-05-05T03:05:30.442924Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-05T03:05:30.442932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:7500792433484074132:2236], at schemeshard: 72057594046644480, txId: 281474976715678, path id: 3 2025-05-05T03:05:30.442943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-05T03:05:30.442951Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2025-05-05T03:05:30.442957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Notify src datashard 72075186224037888 on partitioning changed splitOp# 281474976715678 at tablet 72057594046644480 2025-05-05T03:05:30.443055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553158 2025-05-05T03:05:30.443505Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2025-05-05T03:05:30.443521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2025-05-05T03:05:30.443529Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715678 2025-05-05T03:05:30.443533Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715678, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2025-05-05T03:05:30.443536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-05-05T03:05:30.443552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 0/1, is published: true 2025-05-05T03:05:30.443594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-05-05T03:05:30.443605Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715678:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-05-05T03:05:30.443616Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715678:0 progress is 1/1 2025-05-05T03:05:30.443622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-05-05T03:05:30.443626Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#281474976715678:0 progress is 1/1 2025-05-05T03:05:30.443627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-05-05T03:05:30.443630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 1/1, is published: true 2025-05-05T03:05:30.443639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7500792433484074549:2359] message: TxId: 281474976715678 2025-05-05T03:05:30.443643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-05-05T03:05:30.443646Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715678:0 2025-05-05T03:05:30.443649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715678:0 2025-05-05T03:05:30.443679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-05-05T03:05:30.443734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715678 2025-05-05T03:05:30.443750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-05T03:05:30.443752Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976715678:0 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746414330430 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) >> test.py::test[join-lookupjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-Results] >> test.py::test[aggregate-histogram_cdf-default.txt-Results] [GOOD] >> test.py::test[aggregate-list_nullable--Results] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_very_complex_type--Results] >> test_actorsystem.py::TestWithHybridNodeWith4Cpu::test >> test.py::test[schema-copy-read_schema-Results] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-Results] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-plan] >> test_generator.py::TestTpchGenerator::test_s1 [GOOD] >> test.py::test[table_range-range_with_view--ForceBlocks] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-plan] [GOOD] |82.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1_state [GOOD] >> test.py::test[table_range-range_with_view--Results] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] >> test_watermarks.py::TestWatermarks::test_pq_watermarks[v1-mvp_external_ydb_endpoint0] >> test_generator.py::TestTpchGenerator::test_s1_state_and_parts [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-result_sets] >> test_stop.py::TestStop::test_stop_query[v1-streaming] [GOOD] >> test.py::test[aggr_factory-bottom-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-Results] >> test_public_metrics.py::TestPublicMetrics::test_select_unlimited[v1-STATS_MODE_FULL] [GOOD] |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1 [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int32-pk_types20-all_types20-index20---] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-result_sets] |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1_state_and_parts [GOOD] >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-without_checkpoints-mvp_external_ydb_endpoint0] [GOOD] |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |82.7%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/objcopy_533f06087e794c7af638ea75dc.o >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-plan] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-result_sets] |82.7%| [PY] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/objcopy_533f06087e794c7af638ea75dc.o |82.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption >> test_actorsystem.py::TestWithHybridNodeWith4Cpu::test [GOOD] |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/nemesis/driver/nemesis |82.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part13/ydb-tests-fq-yt-kqp_yt_file-part13 >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-plan] >> test.py::test[table_range-range_with_view--Results] [GOOD] >> test.py::test[tpch-q12-default.txt-ForceBlocks] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] |82.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/library/ut/ydb-tests-library-ut >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-plan] |82.8%| [BN] {BAZEL_UPLOAD} $(B)/ydb/tests/stability/tool/nemesis |82.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-result_sets] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnMultiShardTable >> test.py::test[schema-insert_sorted-row_spec-Results] [GOOD] >> test.py::test[schema-other--Results] [SKIPPED] >> test.py::test[schema-select_field-row_spec-Results] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-plan] >> test_watermarks.py::TestWatermarks::test_idle_watermarks[v1-mvp_external_ydb_endpoint0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-without_checkpoints] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=313489) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:05:14] send response localhost:24611/?database=local ::1 - - [05/May/2025 03:05:14] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:05:28] send response localhost:24611/?database=local ::1 - - [05/May/2025 03:05:28] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - |82.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> test_actorsystem.py::TestWithHybridNodeWith16Cpu::test |82.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |82.8%| [AR] {RESULT} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[aggregate-list_nullable--Results] [GOOD] >> test.py::test[aggregate-percentile_and_avg_grouped--Results] |82.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-result_sets] >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start_with_filter [GOOD] |82.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-result_sets] |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-result_sets] |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-without_checkpoints-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=319377) is multi-threaded, use of fork() may lead to deadlocks in the child. 
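The stderr blocks above repeat two Python warnings: the DeprecationWarning from multiprocessing/popen_fork.py ("This process ... is multi-threaded, use of fork() may lead to deadlocks in the child") and ResourceWarnings about unclosed log files and still-running subprocesses, each followed by "Enable tracemalloc to get the object allocation traceback". The following is only a minimal standalone sketch of how such warnings typically arise and how that tracemalloc hint is normally applied; it is not code from the YDB test harness or its recipes, and the stub command, log path, and function name are made up for illustration.

import multiprocessing
import subprocess
import sys
import tracemalloc
import warnings


def start_stub_server(log_path: str) -> subprocess.Popen:
    # Opening the log file and never closing it mirrors the
    # "ResourceWarning: unclosed file ... moto_server.out.log" entries:
    # once this function returns, the file object is dropped while still open.
    out = open(log_path, "w", encoding="utf-8")
    return subprocess.Popen(
        [sys.executable, "-c", "print('stub server')"],
        stdout=out,
        stderr=subprocess.STDOUT,
    )


if __name__ == "__main__":
    # ResourceWarning is ignored by default outside test runners; show it here.
    warnings.simplefilter("always", ResourceWarning)

    # "Enable tracemalloc to get the object allocation traceback": with
    # tracemalloc started (equivalently PYTHONTRACEMALLOC=1 or -X tracemalloc),
    # the warning report also shows where the leaked file was opened.
    tracemalloc.start()

    proc = start_stub_server("/tmp/stub_server.out.log")
    proc.wait()  # waiting (or using `with subprocess.Popen(...)`) prevents the
                 # "subprocess ... is still running" variant of the warning

    # "use of fork() may lead to deadlocks in the child": CPython 3.12+ emits
    # this DeprecationWarning when a process that already has threads calls
    # fork() via multiprocessing; the "spawn" start method sidesteps it.
    multiprocessing.set_start_method("spawn", force=True)
    with multiprocessing.Pool(processes=2) as pool:
        print(pool.map(abs, [-1, -2, -3]))

Running a script like this under python -X tracemalloc (or with PYTHONTRACEMALLOC=1 set, which is how the hint in the log is usually satisfied) makes the unclosed-file report include the allocation traceback instead of the bare "Enable tracemalloc ..." notice seen above.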
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:05:22] send response localhost:3715/?database=local ::1 - - [05/May/2025 03:05:22] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:05:32] send response localhost:3715/?database=local ::1 - - [05/May/2025 03:05:32] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - |82.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/oracle/tests-datasource-oracle >> test.py::test[window-win_func_over_group_by_list_names--ForceBlocks] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names--Results] |82.9%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/oracle/tests-datasource-oracle >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] |82.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/olap_workload/olap_workload >> TCheckpointStorageTest::ShouldCreateCheckpoint >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-plan] >> TCheckpointStorageTest::ShouldCreateCheckpoint [GOOD] >> TCheckpointStorageTest::ShouldCreateGetCheckpoints >> test_actorsystem.py::TestWithHybridNodeWith16Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_public_metrics.py::TestPublicMetrics::test_select_unlimited[v1-STATS_MODE_FULL] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=316808) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTableWithChangefeed [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-plan] >> test.py::test[schema-select_field-row_spec-Results] [GOOD] >> test.py::test[schema-select_field-schema-Results] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-max-default.txt-Results] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> TCheckpointStorageTest::ShouldCreateGetCheckpoints [GOOD] >> TCheckpointStorageTest::ShouldGetCheckpointsEmpty >> TCheckpointStorageTest::ShouldGetCheckpointsEmpty [GOOD] >> TCheckpointStorageTest::ShouldDeleteGraph >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-plan] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] |82.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests >> TCheckpointStorageTest::ShouldDeleteGraph [GOOD] >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints >> test.py::test[tpch-q12-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q12-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-result_sets] >> test.py::test[join-mapjoin_on_very_complex_type--Results] [GOOD] >> test.py::test[join-mergejoin_any_no_join_reduce--Results] |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |82.9%| [BN] {BAZEL_UPLOAD} $(B)/ydb/tests/stability/tool/olap_workload |82.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/example/ydb-tests-example >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-streaming] >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD] >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-simple] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-result_sets] |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-result_sets] >> test_select_1.py::TestSelect1::test_select_pg[v1] >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/group_by_lookup.script-script] ------- 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTableWithChangefeed [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.383483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.383507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.383512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:20.383517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.383522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.383526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.383534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.383547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.383631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.383700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.396876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.396895Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.396980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources 
configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.402320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.402366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.402394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.403206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.403247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.403342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.403381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.403764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.403982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.403991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.404043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.404051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.404057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.404080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.405466Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.454881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.454946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.454993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.455048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.455058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.459505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.459537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.459587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.459596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.459602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.459606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.464806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.464827Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.464833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.465273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.465286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.465292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.465298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.466099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.466604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.466641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.466829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.466858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.466866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 
1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.466925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.466935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.466962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.466975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.467394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.467404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.467436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.467441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.467459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.467469Z node 1 :FLAT_TX_SCHEMESHARD I ... LAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-05T03:05:39.427029Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:05:39.427217Z node 169 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:39.427231Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:39.427235Z node 169 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:05:39.427239Z node 169 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 7 2025-05-05T03:05:39.427244Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-05T03:05:39.427258Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:05:39.427918Z node 169 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:05:39.427947Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T03:05:39.427973Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 
2025-05-05T03:05:39.428158Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:05:39.428164Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:05:39.428170Z node 169 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000009 2025-05-05T03:05:39.428256Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:05:39.428277Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 725849475180 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:05:39.428288Z node 169 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000009, at schemeshard: 72057594046678944 2025-05-05T03:05:39.428318Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T03:05:39.428329Z node 169 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:05:39.428333Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:05:39.428338Z node 169 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:05:39.428341Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:05:39.428350Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:05:39.428359Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-05T03:05:39.428364Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T03:05:39.428370Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:05:39.428375Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T03:05:39.428378Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T03:05:39.428388Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-05T03:05:39.428393Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T03:05:39.428397Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-05-05T03:05:39.428401Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 
2025-05-05T03:05:39.428596Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:39.428657Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:39.428943Z node 169 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:05:39.428953Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:05:39.428990Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-05-05T03:05:39.429014Z node 169 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:05:39.429020Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [169:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T03:05:39.429025Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [169:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 6 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T03:05:39.429193Z node 169 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:39.429206Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:39.429211Z node 169 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:05:39.429215Z node 169 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-05T03:05:39.429220Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:05:39.429319Z node 169 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:39.429330Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:39.429334Z node 169 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:05:39.429338Z node 169 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-05-05T03:05:39.429342Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 6] was 1 2025-05-05T03:05:39.429352Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T03:05:39.429356Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [169:123:2149] 2025-05-05T03:05:39.429380Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:05:39.429384Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-05T03:05:39.429394Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:05:39.430051Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:39.430475Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:39.430504Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:05:39.430515Z node 169 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T03:05:39.430547Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T03:05:39.430556Z node 169 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:05:39.430560Z node 169 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T03:05:39.430565Z node 169 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1004, itemIdx# 4294967295 2025-05-05T03:05:39.431380Z node 169 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-05T03:05:39.431444Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T03:05:39.431453Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T03:05:39.431529Z node 169 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-05T03:05:39.431546Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T03:05:39.431552Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [169:975:2878] TestWaitNotification: OK eventTxId 1004 |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_actorsystem.py::TestWithHybridNodeWith5Cpu::test |83.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/ydb-tests-olap |83.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-with_recovery] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnViewsAndTables [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-result_sets] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-plan] |83.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test >> test.py::test[schema-select_field-schema-Results] [GOOD] >> test.py::test[schema-user_schema_append--Results] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[window-win_func_over_group_by_list_names--Results] [GOOD] >> test.py::test[window-win_func_part_by_expr_new-default.txt-ForceBlocks] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-plan] >> test.py::test[tpch-q12-default.txt-Results] [GOOD] >> test.py::test[type_v3-bare_yson--ForceBlocks] [SKIPPED] >> test.py::test[type_v3-bare_yson--Results] |83.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests >> test.py::test[type_v3-bare_yson--Results] [SKIPPED] >> test.py::test[type_v3-decimal_yt_nollvm--ForceBlocks] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD] >> test.py::test[aggregate-percentile_and_avg_grouped--Results] [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000bec/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000bec/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 338557 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/0008eb/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk21/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.upsert/audit.txt 2025-05-05T03:05:34.333201Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:05:34.333183Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 
5)","start_time":"2025-05-05T03:05:34.315132Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_actorsystem.py::TestWithComputeNodeWith27Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[join/group_by_lookup.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-plan] >> test.py::test[schema-user_schema_append--Results] [GOOD] >> test.py::test[select-column_labels-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-result_sets] >> test_watermarks.py::TestWatermarks::test_pq_watermarks[v1-mvp_external_ydb_endpoint0] [GOOD] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-result_sets] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> test_actorsystem.py::TestWithHybridNodeWith5Cpu::test [GOOD] |83.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-result_sets] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-result_sets] |83.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable >> test.py::test[join-mergejoin_any_no_join_reduce--Results] [GOOD] >> test.py::test[join-mergejoin_force_per_link--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names--Results] >> test_actorsystem.py::TestWithHybridNodeWith6Cpu::test |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-plan] |83.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/statistics_workload/statistics_workload |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-plan] [GOOD] >> test.py::test[aggr_factory-max-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Results] >> test_actorsystem.py::TestWithHybridNodeWith17Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-plan] |83.1%| [BN] {BAZEL_UPLOAD} $(B)/ydb/tests/stability/tool/statistics_workload |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/audit/py3test >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_time_order_recoverer[v1-kikimr0] >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/ct.script-script] >> test_disposition.py::TestContinueMode::test_disposition_oldest[v1-mvp_external_ydb_endpoint0] [GOOD] |83.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/public/tools/local_ydb/local_ydb >> test.py::test[type_v3-decimal_yt_nollvm--ForceBlocks] [GOOD] |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |83.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/common/ydb-tests-fq-common >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-plan] >> test.py::test[select-column_labels-default.txt-Results] [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Results] [GOOD] >> test.py::test[bigdate-implicit_cast_callable-default.txt-Results] |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |83.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/tests-datasource-postgresql >> test.py::test[window-win_func_part_by_expr_new-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Results] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] |83.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/oltp_workload/oltp_workload ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start_with_filter [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=273127) is multi-threaded, use of fork() may lead to deadlocks in the child. 
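The DeprecationWarning above is emitted because CPython's multiprocessing defaults to starting children with fork() on Linux, which is unsafe once the parent process already has threads. A minimal sketch of the usual mitigation, assuming the harness is free to pick its start method (generic CPython usage, not code from the YDB test framework; the worker function is illustrative):

    # Sketch only: request the "spawn" start method so children are not
    # created with fork() from a multi-threaded parent process.
    import multiprocessing as mp

    def square(n: int) -> int:
        # Illustrative workload; defined at module level so "spawn" can re-import it.
        return n * n

    if __name__ == "__main__":
        ctx = mp.get_context("spawn")           # or "forkserver"
        with ctx.Pool(processes=2) as pool:
            print(pool.map(square, range(4)))   # [0, 1, 4, 9]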
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-plan] |83.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/cfg/bin/ydb_configure >> test_actorsystem.py::TestWithHybridNodeWith6Cpu::test [GOOD] |83.2%| [BN] {BAZEL_UPLOAD} $(B)/ydb/tests/stability/tool/cfg |83.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-result_sets] |83.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |83.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests >> test_actorsystem.py::TestWithComputeNodeWith27Cpu::test [GOOD] |83.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_optional_field |83.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario >> test_sql.py::TestCanonicalFolder1::test_case[simple/ct.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-plan] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith17Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-result_sets] >> test_delete_read_rules_after_abort_by_system.py::TestDeleteReadRulesAfterAbortBySystem::test_delete_read_rules_after_abort_by_system [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/0008e5/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk2/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_bad_dynconfig/audit.txt 2025-05-05T03:05:39.563533Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"{none}","remote_address":"127.0.0.1","status":"ERROR","subject":"{none}","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |83.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} 
ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[select-column_labels-default.txt-Results] [GOOD] |83.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[type_v3-decimal_yt_nollvm--ForceBlocks] [GOOD] |83.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/minidumps/ydb-tests-functional-minidumps |83.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |83.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/tests-datasource-clickhouse |83.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_expand_with_distconf >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-plan] [GOOD] |83.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-plan] |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test.py::test[window-win_func_part_by_expr_new-default.txt-Results] [GOOD] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-ForceBlocks] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-result_sets] |83.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/sql/ydb-tests-sql >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-plan] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-streaming] [GOOD] |83.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |83.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |83.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |83.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |83.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ydb/connector-tests-datasource-ydb |83.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache >> test_yds_bindings.py::TestBindings::test_raw_empty_schema_binding[v1] >> test_actorsystem.py::TestWithHybridNodeWith7Cpu::test >> 
test_row_dispatcher.py::TestPqRowDispatcher::test_restart_compute_node |83.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb >> test_quota_exhaustion.py::TestYdbWorkload::test |83.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/datasource-ms_sql_server |83.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/join/yql-providers-generic-connector-tests-join |83.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/sql/large/ydb-tests-sql-large >> test_watermarks.py::TestWatermarks::test_idle_watermarks[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-result_sets] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--Results] |83.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |83.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/mysql/connector-tests-datasource-mysql |83.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/replay >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Utf8-pk_types30-all_types30-index30---] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-analytics] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-result_sets] |83.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |83.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/tool |83.4%| [LD] {RESULT} $(B)/ydb/tests/stability/tool/tool |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/tool |83.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/oracle/tests-datasource-oracle >> test_disposition.py::TestContinueMode::test_disposition_fresh[v1-mvp_external_ydb_endpoint0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_disposition.py::TestContinueMode::test_disposition_oldest[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=326867) is multi-threaded, use of fork() may lead to deadlocks in the child. 
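The unclosed-file ResourceWarnings in the ydb/tests/datashard/s3 blocks above and below point at log handles opened in library/recipes/common/__init__.py:29, passed to subprocess.Popen for moto_server and never closed, with the child process still running at interpreter shutdown. A hedged sketch of the general shape of a fix (the MotoServer helper and its stop() method are hypothetical names; only the Popen-with-log-files pattern and the log file names mirror the warning text):

    # Sketch only, not the actual library/recipes code: keep references to the
    # redirect targets, reap the child, then close the files.
    import subprocess
    from pathlib import Path

    class MotoServer:                            # hypothetical helper, for illustration
        def __init__(self, out_dir: Path) -> None:
            self._out = open(out_dir / "moto_server.out.log", "w", encoding="utf-8")
            self._err = open(out_dir / "moto_server.err.log", "w", encoding="utf-8")
            self._proc = subprocess.Popen(["moto_server"],
                                          stdout=self._out, stderr=self._err)

        def stop(self) -> None:
            self._proc.terminate()
            self._proc.wait()    # reap the child: avoids "subprocess ... is still running"
            self._out.close()    # close the redirect targets: avoids the
            self._err.close()    # "unclosed file <_io.TextIOWrapper ...>" warnings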
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:05:21] send response localhost:31430/?database=local ::1 - - [05/May/2025 03:05:21] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:05:41] send response localhost:31430/?database=local ::1 - - [05/May/2025 03:05:41] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-result_sets] [GOOD] |83.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |83.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-result_sets] |83.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |83.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/py3test |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith7Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-result_sets] >> test.py::test[bigdate-implicit_cast_callable-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-plan] >> test_select_1.py::TestSelect1::test_select_pg[v1] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names--Results] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-plan] >> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding[v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper 
name='/home/runner/.ya/build/build_root/177e/000be4/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000be4/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 350787 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-plan] >> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks [GOOD] >> TStorageServiceTest::ShouldCreateCheckpoint >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-14.test] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-result_sets] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Results] >> TStorageServiceTest::ShouldCreateCheckpoint [GOOD] >> TStorageServiceTest::ShouldGetCheckpoints |83.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |83.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[join-mergejoin_with_different_key_names--Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--Results] [GOOD] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-Results] >> test_actorsystem.py::TestWithComputeNodeWith28Cpu::test >> TStorageServiceTest::ShouldGetCheckpoints [GOOD] >> TStorageServiceTest::ShouldAbortCheckpoint >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith18Cpu::test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-std] [GOOD] >> TStorageServiceTest::ShouldAbortCheckpoint [GOOD] >> TStorageServiceTest::ShouldGetState |83.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/query_replay_yt/query_replay_yt |83.6%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tools/query_replay_yt/query_replay_yt >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-plan] >> test_actorsystem.py::TestWithHybridNodeWith8Cpu::test >> TStorageServiceTest::ShouldGetState [GOOD] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Results] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-plan] >> test.py::test[ytflow-select--ForceBlocks] [SKIPPED] >> test.py::test[ytflow-select--Results] [SKIPPED] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-result_sets] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint8-pk_types24-all_types24-index24---] |83.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[ytflow-select--Results] [SKIPPED] |83.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith18Cpu::test [GOOD] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping--Results] >> TExportToS3WithRebootsTests::ShouldSucceedOnMultiShardTable [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-plan] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-finished] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-plan] >> test_yds_bindings.py::TestBindings::test_raw_empty_schema_binding[v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldGetState [GOOD] Test command err: 2025-05-05T03:05:55.090164Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7500792536995426855:2048] with connection to localhost:6741:local 2025-05-05T03:05:55.090225Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:05:55.119152Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:05:55.119169Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] 
Send TEvRegisterCoordinatorResponse 2025-05-05T03:05:55.119321Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T03:05:55.231779Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T03:05:55.231805Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T03:05:55.543311Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7500792543983982573:2048] with connection to localhost:6741:local 2025-05-05T03:05:55.543414Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:05:55.571114Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:05:55.571131Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:05:55.571266Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T03:05:55.702350Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T03:05:55.702372Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T03:05:55.702597Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-05-05T03:05:55.738364Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-05-05T03:05:55.738391Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-05-05T03:05:55.738629Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2025-05-05T03:05:55.771653Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2025-05-05T03:05:55.771679Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2025-05-05T03:05:55.771801Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-05-05T03:05:55.802138Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-05-05T03:05:56.003906Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7500792540684381444:2048] with connection to localhost:6741:local 2025-05-05T03:05:56.003948Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:05:56.029850Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:05:56.029874Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:05:56.030066Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T03:05:56.140906Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T03:05:56.140928Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T03:05:56.141100Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T03:05:56.189930Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-05-05T03:05:56.189952Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 
2025-05-05T03:05:56.190134Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-05-05T03:05:56.216916Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-05-05T03:05:56.216945Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-05-05T03:05:56.217095Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T03:05:56.243503Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-05-05T03:05:56.243523Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T03:05:56.243687Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-05-05T03:05:56.265375Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-05-05T03:05:56.265403Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-05-05T03:05:56.265584Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest 2025-05-05T03:05:56.293093Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint aborted 2025-05-05T03:05:56.293119Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse 2025-05-05T03:05:56.294435Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvAbortCheckpointRequest 2025-05-05T03:05:56.315762Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint aborted 2025-05-05T03:05:56.315780Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvAbortCheckpointResponse 2025-05-05T03:05:56.315905Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-05-05T03:05:56.342711Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-05-05T03:05:56.667117Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7500792547481303372:2048] with connection to localhost:6741:local 2025-05-05T03:05:56.667162Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:05:56.702609Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:05:56.702624Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:05:56.702786Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T03:05:56.855877Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T03:05:56.855896Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T03:05:56.856075Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2025-05-05T03:05:56.875194Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2025-05-05T03:05:56.875241Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2025-05-05T03:05:56.875462Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvGetTaskState: tasks {1317} 2025-05-05T03:05:56.875485Z node 4 
:STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] GetState, tasks: 1317 2025-05-05T03:05:56.940708Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ListOfStates results: 2025-05-05T03:05:56.940740Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] taskId 1317 checkpoint id: 17:1, rows count: 1 2025-05-05T03:05:56.940750Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SkipStatesInFuture, skip 0 checkpoints 2025-05-05T03:05:56.942167Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SelectState: task_id 1317, seq_no 1, blob_seq_num 0 2025-05-05T03:05:56.999193Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] DeserializeState, task id 1317, blob size 49 2025-05-05T03:05:56.999232Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ApplyIncrements 2025-05-05T03:05:56.999926Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [{ Id: 1 Generation: 17 }] Send TEvGetTaskStateResult: tasks: {1317} >> test_actorsystem.py::TestWithHybridNodeWith8Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-result_sets] >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-plan] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith38Cpu::test >> docker_wrapper_test.py::test_pg_generated[Test64BitErrorChecking] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-result_sets] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-fifo] [GOOD] |83.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnMultiShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] 
sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.290405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.290431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.290437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:20.290442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.290448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.290452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.290460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.290476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.290567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.290639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.303740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.303765Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.303857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.305349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.305381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.305402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.306075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.306115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.306223Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.306267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.306620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.314545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.314579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.314640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.314653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.314660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.314687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.318870Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.336864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.336938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.336999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.337054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.337065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.338571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.338607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.338672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.338684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.338689Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.338694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.342548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.342568Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.342575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.346401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.346419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.346427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.346439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.347122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.347669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.347707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.347878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.347903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.347911Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.347966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.347975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.348009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.348021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.348398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-05-05T03:04:20.348406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.348454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.348460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.348471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.348478Z node 1 :FLAT_TX_SCHEMESHARD I ... 0/1, is published: true 2025-05-05T03:05:58.454789Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-05-05T03:05:58.454837Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 Forgetting tablet 72075186233409549 2025-05-05T03:05:58.455242Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-05-05T03:05:58.455294Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T03:05:58.455909Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:05:58.455924Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-05T03:05:58.455939Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:05:58.456058Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T03:05:58.456084Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2025-05-05T03:05:58.456160Z node 237 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:05:58.456180Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 1017907251307 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:05:58.456187Z node 237 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2025-05-05T03:05:58.456211Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at 
schemeshard: 72057594046678944 2025-05-05T03:05:58.456221Z node 237 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:05:58.456225Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:05:58.456230Z node 237 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:05:58.456254Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:05:58.456263Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:05:58.456273Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:05:58.456281Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T03:05:58.456288Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:05:58.456293Z node 237 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T03:05:58.456297Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T03:05:58.456306Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:05:58.456311Z node 237 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T03:05:58.456315Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-05-05T03:05:58.456319Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T03:05:58.456912Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:58.456968Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:58.457045Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-05-05T03:05:58.457054Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-05T03:05:58.457101Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-05-05T03:05:58.457107Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-05T03:05:58.457422Z node 237 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:05:58.457486Z node 237 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:05:58.457494Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:05:58.457540Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T03:05:58.457568Z node 237 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:05:58.457574Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [237:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T03:05:58.457580Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [237:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T03:05:58.457768Z node 237 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:58.457783Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:58.457789Z node 237 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:05:58.457796Z node 237 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-05T03:05:58.457801Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:05:58.457944Z node 237 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:58.457958Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:58.457962Z node 237 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:05:58.457967Z node 237 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T03:05:58.457971Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:05:58.457983Z node 237 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T03:05:58.457988Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [237:125:2151] 2025-05-05T03:05:58.458030Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:05:58.458035Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T03:05:58.458045Z 
node 237 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:05:58.458574Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:58.458895Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:05:58.458917Z node 237 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T03:05:58.458927Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T03:05:58.458936Z node 237 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:05:58.458940Z node 237 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T03:05:58.458944Z node 237 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1003, itemIdx# 4294967295 2025-05-05T03:05:58.459001Z node 237 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:05:58.459321Z node 237 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-05T03:05:58.459372Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-05T03:05:58.459379Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-05T03:05:58.459437Z node 237 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-05T03:05:58.459456Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-05T03:05:58.459460Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [237:878:2810] TestWaitNotification: OK eventTxId 1003 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_watermarks.py::TestWatermarks::test_idle_watermarks[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=296299) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:05:47] send response localhost:63320/?database=local ::1 - - [05/May/2025 03:05:47] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-plan] [GOOD] >> test_select_1.py::TestSelect1::test_select_pg[v2] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith28Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-plan] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] |83.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith18Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith9Cpu::test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-plan] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-analytics] [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-plan] |83.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |83.7%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut >> TFlatTest::RejectByPerShardReadSize >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-result_sets] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding[v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-plan] |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/script_execution/py3test >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith9Cpu::test [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding_date_time[v1] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-result_sets] >> TFlatTest::MiniKQLRanges ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.354438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.354463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.354469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:20.354474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.354479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.354483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.354492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.354505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.354611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.354688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.378982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.379007Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.379096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.386536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.386584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.386604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.387290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.387329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.387420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.387457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.390499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.390711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.390721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.390770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.390777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.390784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.390808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.392168Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.412688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain 
SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.412751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.412800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.412850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.412859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.413343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.413368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.413411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.413419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.413424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.413428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.413751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.413760Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.413765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.414125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.414135Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.414140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.414147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.414709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.415031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.415060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is 
[1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.415214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.415235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.415242Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.415291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.415297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.415327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.415338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.415700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.415708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.415747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.415752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.415761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.415767Z node 1 :FLAT_TX_SCHEMESHARD I ... 
Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-05T03:06:03.364232Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:06:03.364355Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:06:03.364361Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-05T03:06:03.364371Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:06:03.364579Z node 261 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:06:03.364646Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:06:03.364651Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:06:03.364656Z node 261 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:06:03.364997Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T03:06:03.365047Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-05-05T03:06:03.365076Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2025-05-05T03:06:03.365140Z node 261 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:06:03.365159Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 1120986466413 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:06:03.365167Z node 261 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2025-05-05T03:06:03.365191Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T03:06:03.365200Z node 261 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:06:03.365205Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:06:03.365211Z node 261 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:06:03.365214Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:06:03.365222Z node 261 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:06:03.365232Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:06:03.365237Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T03:06:03.365244Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:06:03.365249Z node 261 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T03:06:03.365253Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T03:06:03.365260Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:06:03.365266Z node 261 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T03:06:03.365270Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-05-05T03:06:03.365274Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T03:06:03.365914Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-05-05T03:06:03.365926Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-05T03:06:03.365942Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-05-05T03:06:03.365947Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-05T03:06:03.365964Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:06:03.365981Z node 261 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:06:03.366480Z node 261 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:06:03.366495Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:06:03.366531Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T03:06:03.366560Z node 261 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:06:03.366565Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [261:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T03:06:03.366569Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [261:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T03:06:03.366735Z node 261 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:06:03.366749Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:06:03.366753Z node 261 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:06:03.366758Z node 261 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-05T03:06:03.366763Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:06:03.366843Z node 261 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:06:03.366865Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:06:03.366869Z node 261 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:06:03.366873Z node 261 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T03:06:03.366877Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:06:03.366886Z node 261 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T03:06:03.366891Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [261:125:2151] 2025-05-05T03:06:03.366969Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:06:03.366974Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T03:06:03.366984Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:06:03.367616Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:06:03.367749Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:06:03.367799Z node 261 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T03:06:03.367810Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T03:06:03.367818Z node 261 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T03:06:03.367823Z node 261 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T03:06:03.367827Z node 261 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1003, itemIdx# 4294967295 2025-05-05T03:06:03.368163Z node 261 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:06:03.368484Z node 261 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-05T03:06:03.368530Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-05T03:06:03.368536Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-05T03:06:03.368599Z node 261 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-05T03:06:03.368616Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-05T03:06:03.368621Z node 261 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [261:889:2821] TestWaitNotification: OK eventTxId 1003 >> TFlatTest::CopyTableAndRead >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-plan] >> TFlatTest::MiniKQLRanges [GOOD] >> TFlatTest::MergeEmptyAndWrite >> TFlatTest::CopyTableAndRead [GOOD] >> TFlatTest::CopyTableAndDropOriginal >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-plan] >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] >> TFlatTest::MergeEmptyAndWrite [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-plan] >> TFlatTest::CopyTableAndDropOriginal [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-plan] |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::MergeEmptyAndWrite [GOOD] Test command err: 2025-05-05T03:06:05.823860Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500792586035951385:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:06:05.823885Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00040b/r3tmp/tmpYQ2gZF/pdisk_1.dat 2025-05-05T03:06:05.876977Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23947 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:06:05.953048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:06:05.953078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-05T03:06:05.954322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:06:05.955036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:06:05.958269Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:06:05.964017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00040b/r3tmp/tmpl29AzB/pdisk_1.dat 2025-05-05T03:06:06.310329Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:06:06.311021Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:30047 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-05-05T03:06:06.397411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:06:06.397439Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:06:06.397765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:06:06.398488Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:06:06.398893Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:06:06.403981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:06:06.430647Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T03:06:06.431263Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T03:06:06.435655Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T03:06:06.436721Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746414366466 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-05-05T03:06:06.448519Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:06:06.448979Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:06:06.449026Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:06:06.449174Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:06:06.449201Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:06:06.449240Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T03:06:06.449334Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:06:06.449526Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:06:06.449662Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T03:06:06.449850Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:06:06.449889Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:06:06.449956Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T03:06:06.450091Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:06:06.450125Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:06:06.450189Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T03:06:06.450364Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:06:06.450403Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:06:06.450475Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T03:06:06.450574Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:06:06.450824Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:06:06.450920Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T03:06:06.451073Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:06:06.451111Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:06:06.451162Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T03:06:06.451270Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:06:06.451848Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:06:06.451943Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T03:06:06.452070Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:06:06.452095Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:06:06.452135Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T03:06:06.452221Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:06:06.452721Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:06:06.452816Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T03:06:06.452996Z node 2 
:TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:06:06.453034Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:06:06.453102Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T03:06:06.453308Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-05T03:06:06.453321Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-05T03:06:06.453377Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:06:06.453451Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T03:06:06.453586Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-05T03:06:06.453596Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-05T03:06:06.464979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Tran ... on: 7 2025-05-05T03:06:06.513244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-05T03:06:06.513254Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-05-05T03:06:06.513258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-05-05T03:06:06.513259Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715687 2025-05-05T03:06:06.513260Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715687, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2025-05-05T03:06:06.513262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-05T03:06:06.513265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 0/1, is published: true 2025-05-05T03:06:06.513281Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-05-05T03:06:06.513296Z node 2 :TX_DATASHARD DEBUG: Complete [1746414366557 : 281474976715687] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7500792590043960154:2147], exec latency: 0 ms, propose latency: 0 ms 2025-05-05T03:06:06.513311Z node 2 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715687 state PreOffline TxInFly 0 2025-05-05T03:06:06.513336Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-05-05T03:06:06.513336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1746414366557 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 
173 } } 2025-05-05T03:06:06.513339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2025-05-05T03:06:06.513357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1746414366557 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 173 } } 2025-05-05T03:06:06.513373Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1746414366557 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 173 } } 2025-05-05T03:06:06.513398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2025-05-05T03:06:06.513412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2025-05-05T03:06:06.513418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2025-05-05T03:06:06.513438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7500792590043960727 RawX2: 4503608217307445 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-05-05T03:06:06.513445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2025-05-05T03:06:06.513455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7500792590043960727 RawX2: 4503608217307445 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-05-05T03:06:06.513463Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-05-05T03:06:06.513468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7500792590043960727 RawX2: 4503608217307445 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-05-05T03:06:06.513475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715687:0, shardIdx: 72057594046644480:3, datashard: 72075186224037890, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-05-05T03:06:06.513482Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-05-05T03:06:06.513484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715687:0, datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-05-05T03:06:06.513487Z node 2 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715687:0 129 -> 240 2025-05-05T03:06:06.513516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-05-05T03:06:06.513545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-05-05T03:06:06.513567Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715687 datashard 72075186224037890 state PreOffline 2025-05-05T03:06:06.513571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-05-05T03:06:06.513574Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715687:0 ProgressState, at schemeshard: 72057594046644480 2025-05-05T03:06:06.513578Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-05-05T03:06:06.513634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T03:06:06.513684Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715687:0 progress is 1/1 2025-05-05T03:06:06.513691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-05-05T03:06:06.513693Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715687:0 progress is 1/1 2025-05-05T03:06:06.513694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-05-05T03:06:06.513696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 1/1, is published: true 2025-05-05T03:06:06.513706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7500792590043960972:2393] message: TxId: 281474976715687 2025-05-05T03:06:06.513714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-05-05T03:06:06.513716Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715687:0 2025-05-05T03:06:06.513718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715687:0 2025-05-05T03:06:06.513745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 TClient::Ls request: /dc-1/Dir/TableOld 2025-05-05T03:06:06.514610Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T03:06:06.514631Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-05-05T03:06:06.514983Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T03:06:06.515054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500792590043960727 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-05-05T03:06:06.515070Z node 2 :FLAT_TX_SCHEMESHARD 
INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:06:06.515184Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:06:06.515178Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-05T03:06:06.516146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T03:06:06.516174Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-05T03:06:06.516204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T03:06:06.516246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T03:06:06.516260Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T03:06:06.516269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T03:06:06.516282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T03:06:06.516293Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-05-05T03:06:06.516344Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-05T03:06:06.516369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T03:06:06.516374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T03:06:06.516382Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-result_sets] >> test_select_1.py::TestSelect1::test_select_pg[v2] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropOriginal [GOOD] Test command err: 2025-05-05T03:06:05.916428Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500792584998955535:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:06:05.916460Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d0b/r3tmp/tmp8nWxTQ/pdisk_1.dat 2025-05-05T03:06:05.978754Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12235 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:06:06.018878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:06:06.018915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:06:06.020110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:06:06.049431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:06:06.061707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:06:06.089571Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T03:06:06.090389Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T03:06:06.101410Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T03:06:06.102156Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-05-05T03:06:06.120312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976715676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-05T03:06:06.120400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-05T03:06:06.120528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-05T03:06:06.120542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-05-05T03:06:06.120544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T03:06:06.120552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-05T03:06:06.120555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-05T03:06:06.120597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-05-05T03:06:06.120628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-05T03:06:06.120855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-05T03:06:06.120875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-05-05T03:06:06.121066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715676, response: Status: StatusAccepted TxId: 281474976715676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-05-05T03:06:06.121106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 
2025-05-05T03:06:06.121169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-05T03:06:06.121178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-05T03:06:06.121219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-05-05T03:06:06.121247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-05T03:06:06.121253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500792584998956046:2238], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 2 2025-05-05T03:06:06.121262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500792584998956046:2238], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 4 2025-05-05T03:06:06.121270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-05T03:06:06.121276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-05-05T03:06:06.121368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T03:06:06.121390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } waiting... 
2025-05-05T03:06:06.121763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T03:06:06.121778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T03:06:06.121780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2025-05-05T03:06:06.121783Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-05-05T03:06:06.121787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-05-05T03:06:06.121831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T03:06:06.121844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T03:06:06.121845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2025-05-05T03:06:06.121847Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715676, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2025-05-05T03:06:06.121849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 5 2025-05-05T03:06:06.121856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715676, ready parts: 0/1, is published: true 2025-05-05T03:06:06.121871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-05-05T03:06:06.121894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-05-05T03:06:06.121904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715676, partId: 0, tablet: 72057594037968897 2025-05-05T03:06:06.121910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976715676, shardIdx: 72057594046644480:3, partId: 0 2025-05-05T03:06:06.121913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976715676, shardIdx: 72057594046644480:4, partId: 0 2025-05-05T03:06:06.121930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715676, at schemeshard: 72057594046644480 2025-05-05T03:06:06.121932Z no ... 
ected at leader tablet# 72075186224037889, clientId# [2:7500792587489382887:2376], serverId# [2:7500792587489382890:2684], sessionId# [0:0:0] 2025-05-05T03:06:06.586429Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7500792587489382882:2374], serverId# [2:7500792587489382885:2681], sessionId# [0:0:0] 2025-05-05T03:06:06.586442Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T03:06:06.586445Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T03:06:06.586453Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T03:06:06.586455Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T03:06:06.586638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500792587489382353 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 2025-05-05T03:06:06.586655Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:06:06.586691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500792587489382354 RawX2: 4503608217307387 } TabletId: 72075186224037888 State: 4 2025-05-05T03:06:06.586694Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:06:06.586750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:06:06.586762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:06:06.586980Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-05-05T03:06:06.586985Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-05-05T03:06:06.587038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500792587489382660 RawX2: 4503608217307446 } TabletId: 72075186224037891 State: 4 2025-05-05T03:06:06.587043Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:06:06.587060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500792587489382660 RawX2: 4503608217307446 } TabletId: 72075186224037891 State: 4 2025-05-05T03:06:06.587062Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:06:06.587073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500792587489382659 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-05-05T03:06:06.587075Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state 
changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:06:06.587086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500792587489382659 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-05-05T03:06:06.587088Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:06:06.587155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:06:06.587163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:06:06.587168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:06:06.587173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:06:06.587302Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-05-05T03:06:06.587307Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-05-05T03:06:06.587309Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-05T03:06:06.587311Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-05T03:06:06.587713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-05T03:06:06.587775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T03:06:06.587820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-05T03:06:06.587839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T03:06:06.587857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-05T03:06:06.587873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-05T03:06:06.587889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-05T03:06:06.587903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T03:06:06.587918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-05T03:06:06.587932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 
72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T03:06:06.587945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T03:06:06.587947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-05T03:06:06.587957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-05T03:06:06.587961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T03:06:06.587964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T03:06:06.588001Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-05-05T03:06:06.588029Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-05-05T03:06:06.588032Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-05T03:06:06.588035Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-05T03:06:06.588100Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-05-05T03:06:06.588139Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-05-05T03:06:06.588187Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-05T03:06:06.588190Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-05-05T03:06:06.588236Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-05T03:06:06.588238Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-05T03:06:06.588267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T03:06:06.588271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-05T03:06:06.588282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-05-05T03:06:06.588284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-05T03:06:06.588308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-05T03:06:06.588309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-05T03:06:06.588313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-05T03:06:06.588318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T03:06:06.588320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T03:06:06.588324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T03:06:06.588333Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 
candidates, at schemeshard: 72057594046644480 2025-05-05T03:06:06.588435Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-05-05T03:06:06.588444Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-05-05T03:06:06.588696Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T03:06:06.588710Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-05-05T03:06:06.588949Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-05-05T03:06:06.588963Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-05-05T03:06:06.886062Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-05-05T03:06:06.886315Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-05-05T03:06:06.886498Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-05-05T03:06:06.886600Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-result_sets] >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_expand_with_distconf [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-result_sets] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yds_bindings.py::TestBindings::test_raw_empty_schema_binding[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=345276) is multi-threaded, use of fork() may lead to deadlocks in the child. 
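As an editorial aside: the DeprecationWarning above is CPython's guard against calling fork() from a multi-threaded parent, which is what the test harness does when it forks worker processes. A minimal, hypothetical sketch (standard library only, not part of the YDB test suite) of how harness code typically avoids this warning by using the "spawn" start method instead of fork():

import multiprocessing as mp

def _child_probe(x):
    # Hypothetical worker, standing in for whatever the harness runs in a child process.
    return x * 2

if __name__ == "__main__":
    # "spawn" starts a fresh interpreter, so the child does not inherit the parent's
    # threads and the fork-in-a-multi-threaded-process warning is not triggered.
    ctx = mp.get_context("spawn")
    with ctx.Pool(processes=2) as pool:
        print(pool.map(_child_probe, [1, 2, 3]))
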
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:05:40] send response localhost:5349/?database=local ::1 - - [05/May/2025 03:05:40] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-plan] |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-std] [GOOD] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-wo_compat-Results] [SKIPPED] >> test.py::test[binding-named_node_corr_names-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-plan] >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-with_recovery] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith38Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-plan] >> TFlatTest::RejectByPerShardReadSize [GOOD] >> TFlatTest::RejectByPerRequestSize >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-result_sets] |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-result_sets] [GOOD] |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-result_sets] |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming] >> TCheckpointStorageTest::ShouldRegisterCoordinator >> TFlatTest::RejectByPerRequestSize [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-result_sets] >> test.py::test[aggregate-aggrs_no_grouping--Results] [GOOD] >> test.py::test[aggregate-avg_and_sum-default.txt-Results] >> TCheckpointStorageTest::ShouldRegisterCoordinator [GOOD] >> TCheckpointStorageTest::ShouldGetCoordinators >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-plan] >> TCheckpointStorageTest::ShouldGetCoordinators [GOOD] >> TCheckpointStorageTest::ShouldMarkCheckpointsGc ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000bd9/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000bd9/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 366814 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/0008c8/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk7/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_good_dynconfig/audit.txt 2025-05-05T03:06:02.142292Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::RejectByPerRequestSize [GOOD] Test command err: 2025-05-05T03:06:04.100900Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500792582698869115:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:06:04.100933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000318/r3tmp/tmp6AVBLZ/pdisk_1.dat 2025-05-05T03:06:04.163010Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16509 WaitRootIsUp 
'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:06:04.205137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:06:04.205168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:06:04.206236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:06:04.239227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:06:04.257206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:06:06.729943Z node 1 :TX_DATASHARD ERROR: Transaction read size 51002245 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760 2025-05-05T03:06:06.730268Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002245 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760) | 2025-05-05T03:06:06.730326Z node 1 :TX_PROXY ERROR: Actor# [1:7500792591288805318:2905] txid# 281474976715760 RESPONSE Status# WrongRequest marker# P13c 2025-05-05T03:06:06.991713Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500792591439839113:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:06:06.991744Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000318/r3tmp/tmpB5Uk2A/pdisk_1.dat 2025-05-05T03:06:07.007121Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17495 WaitRootIsUp 'dc-1'... 
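A side note on the ResourceWarning from the s3 TTL test output further up (library/recipes/common/__init__.py opening moto_server.out.log/.err.log for subprocess.Popen without closing them, plus "subprocess 366814 is still running"): those warnings are usually silenced by owning the redirect handles in a context manager and waiting for (or terminating) the child. The following is only an illustrative sketch with a placeholder command, not the recipe's actual code:

import subprocess

def run_with_log_redirect(out_path, err_path):
    # Illustrative only: keep the redirect targets in a with-block so they are closed
    # deterministically, and reap the child so no subprocess is left running.
    with open(out_path, "w", encoding="utf-8") as out, open(err_path, "w", encoding="utf-8") as err:
        proc = subprocess.Popen(["echo", "placeholder for moto_server"], stdout=out, stderr=err)
        try:
            proc.wait(timeout=60)
        finally:
            if proc.poll() is None:
                proc.terminate()
                proc.wait()
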
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:06:07.095838Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:06:07.095874Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:06:07.096262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:06:07.096872Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:06:07.102581Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:06:07.111284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:06:09.438070Z node 2 :TX_DATASHARD ERROR: Transaction read size 51002101 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760 2025-05-05T03:06:09.438117Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002101 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760) | 2025-05-05T03:06:09.438150Z node 2 :TX_PROXY ERROR: Actor# [2:7500792604324742606:2904] txid# 281474976715760 RESPONSE Status# WrongRequest marker# P13c 2025-05-05T03:06:09.713625Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500792602704641698:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000318/r3tmp/tmpyG0NmG/pdisk_1.dat 2025-05-05T03:06:09.721509Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:06:09.728851Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:4328 WaitRootIsUp 'dc-1'... 
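As a reading aid for the node 3 part of this test just below: the TX_PROXY error "Transaction total read size 26001366 exceeded limit 10000" is simply the sum of the two per-shard read-size estimates reported at the "marker# P6" lines (9000479 for tablet 72075186224037889 and 17000887 for tablet 72075186224037888). A tiny illustrative check of that arithmetic, with made-up names rather than YDB code:

# Per-shard read-size estimates reported by the proxy at the "marker# P6" lines below.
per_shard_read_size = {
    72075186224037889: 9_000_479,
    72075186224037888: 17_000_887,
}
per_request_limit = 10_000  # the per-request limit this test configures

total = sum(per_shard_read_size.values())
assert total == 26_001_366  # matches "total read size 26001366" in the log
if total > per_request_limit:
    print(f"FailProposedRequest: Transaction total read size {total} exceeded limit {per_request_limit}")
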
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:06:09.818265Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:06:09.818306Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:06:09.818798Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:06:09.819249Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T03:06:09.827558Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:06:10.742657Z node 3 :TX_PROXY DEBUG: actor# [3:7500792602704641699:2086] Handle TEvProposeTransaction 2025-05-05T03:06:10.742679Z node 3 :TX_PROXY DEBUG: actor# [3:7500792602704641699:2086] TxId# 281474976715700 ProcessProposeTransaction 2025-05-05T03:06:10.742689Z node 3 :TX_PROXY DEBUG: actor# [3:7500792602704641699:2086] Cookie# 0 userReqId# "" txid# 281474976715700 SEND to# [3:7500792606999609959:2596] DataReq marker# P0 2025-05-05T03:06:10.742708Z node 3 :TX_PROXY DEBUG: Actor# [3:7500792606999609959:2596] Cookie# 0 txid# 281474976715700 HANDLE TDataReq marker# P1 2025-05-05T03:06:10.742796Z node 3 :TX_PROXY DEBUG: Actor [3:7500792606999609959:2596] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-05-05T03:06:10.742806Z node 3 :TX_PROXY DEBUG: Actor [3:7500792606999609959:2596] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-05-05T03:06:10.742811Z node 3 :TX_PROXY DEBUG: Actor# [3:7500792606999609959:2596] txid# 281474976715700 SEND to# [3:7500792602704641814:2113] TSchemeCache with 2 scheme entries. 
DataReq marker# P2 2025-05-05T03:06:10.742850Z node 3 :TX_PROXY DEBUG: Actor# [3:7500792606999609959:2596] txid# 281474976715700 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-05-05T03:06:10.743109Z node 3 :TX_PROXY DEBUG: Actor# [3:7500792606999609959:2596] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037888 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-05-05T03:06:10.743176Z node 3 :TX_PROXY DEBUG: Actor# [3:7500792606999609959:2596] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037889 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-05-05T03:06:10.743262Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:06:10.743264Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:06:10.743517Z node 3 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037888 2025-05-05T03:06:10.743517Z node 3 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037889 2025-05-05T03:06:10.744233Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-05T03:06:10.744234Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-05T03:06:10.744277Z node 3 :TX_PROXY DEBUG: Actor# [3:7500792606999609959:2596] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037889 read size 9000479 out readset size 0 marker# P6 2025-05-05T03:06:10.744293Z node 3 :TX_PROXY DEBUG: Actor# [3:7500792606999609959:2596] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037888 read size 17000887 out readset size 0 marker# P6 2025-05-05T03:06:10.744307Z node 3 :TX_PROXY ERROR: Actor# [3:7500792606999609959:2596] txid# 281474976715700 FailProposedRequest: Transaction total read size 26001366 exceeded limit 10000 Status# ExecError 2025-05-05T03:06:10.744347Z node 3 :TX_PROXY ERROR: Actor# [3:7500792606999609959:2596] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c 2025-05-05T03:06:10.744374Z node 3 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037889 txId 281474976715700 2025-05-05T03:06:10.744376Z node 3 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037888 txId 281474976715700 2025-05-05T03:06:10.744386Z node 3 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037888 txId 281474976715700 2025-05-05T03:06:10.744388Z node 3 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037889 txId 281474976715700 >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-plan] >> TCheckpointStorageTest::ShouldMarkCheckpointsGc [GOOD] >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-plan] >> test.py::test[binding-named_node_corr_names-default.txt-Results] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-Results] >> 
test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-result_sets] >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints [GOOD] >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-plan] >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD] |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith9Cpu::test [GOOD] |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-result_sets] >> test.py::test[insert-double_append_to_anonymous--Results] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith29Cpu::test |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-fifo] [GOOD] >> test.py::test[select-from_in_front_sub-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-plan] >> test.py::test[schema-copy-read_schema-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-with_recovery] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=343768) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD] >> test.py::test[action-action_eval_cluster_table_for--Results] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-plan] >> test.py::test[lambda-lambda_simple-default.txt-ForceBlocks] >> test.py::test[aggregate-avg_and_sum-default.txt-Results] [GOOD] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-ForceBlocks] >> test.py::test[aggregate-disable_blocks_with_spilling--Results] [SKIPPED] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-result_sets] >> test.py::test[binding-table_range_strict_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint8--Results] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-plan] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-plan] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_reuse--Results] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Results] >> test.py::test[insert-double_append_to_anonymous--Results] [GOOD] >> test.py::test[insert-replace_inferred--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/0008c5/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk8/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs0/audit.txt 2025-05-05T03:06:06.542526Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","cloud_id":"cloud-id-A","end_time":"2025-05-05T03:06:06.542512Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-05-05T03:06:06.521511Z","subject":"root@builtin","detailed_status":"SUCCESS","resource_id":"database-id-C","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_expand_with_distconf [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-result_sets] >> test.py::test[join-opt_on_opt_side--Results] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-result_sets] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-result_sets] >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_time_order_recoverer[v1-kikimr0] [GOOD] |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-plan] [GOOD] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Results] [GOOD] >> test.py::test[file-file_list_simple--ForceBlocks] >> TExportToS3WithRebootsTests::ShouldSucceedOnManyTables [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-plan] >> test.py::test[lambda-lambda_simple-default.txt-ForceBlocks] [GOOD] >> test.py::test[lambda-lambda_simple-default.txt-Results] >> test.py::test[action-action_eval_cluster_table_for--Results] [GOOD] >> test.py::test[action-eval_drop--Results] >> test.py::test[schema-copy-read_schema-ForceBlocks] [GOOD] >> test.py::test[schema-copy-read_schema-Results] >> test.py::test[union_all-union_all_with_limits-default.txt-Results] >> test.py::test[blocks-add_uint8--Results] [GOOD] >> test.py::test[blocks-bitcast_scalar--Results] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-plan] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding_date_time[v1] [GOOD] >> test.py::test[column_group-hint_non_str_yson_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_non_str_yson_fail--Results] [SKIPPED] >> test.py::test[column_group-hint_non_yson_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_non_yson_fail--Results] [SKIPPED] >> test.py::test[column_order-insert--ForceBlocks] >> test.py::test[select-from_in_front_sub-default.txt-Results] [GOOD] >> test.py::test[select-hits_count--Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Results] [GOOD] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--ForceBlocks] >> test.py::test[produce-reduce_all_expr-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_all_expr-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_all_with_python_input_stream--ForceBlocks] >> test.py::test[schema-copy-read_schema-Results] [GOOD] >> test.py::test[schema-insert-schema-ForceBlocks] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Results] [GOOD] >> test.py::test[action-eval_if_guard-default.txt-ForceBlocks] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-plan] >> test.py::test[lambda-lambda_simple-default.txt-Results] [GOOD] >> test.py::test[limit-limit_skip_take-default.txt-ForceBlocks] >> 
test.py::test[insert-replace_inferred--ForceBlocks] [GOOD] >> test.py::test[insert-replace_inferred--Results] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-result_sets] >> test.py::test[join-opt_on_opt_side--Results] [GOOD] >> test.py::test[join-premap_common_multiparents--ForceBlocks] >> test_recovery_mz.py::TestRecovery::test_recovery[v1] >> test.py::test[order_by-native_desc_publish--Results] [SKIPPED] >> test.py::test[order_by-order_by_expr--Results] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal229-pk_types26-all_types26-index26---] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith29Cpu::test [GOOD] >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] >> test.py::test[produce-reduce_all_with_python_input_stream--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_all_with_python_input_stream--Results] [GOOD] >> test.py::test[sampling-map-dynamic-ForceBlocks] >> test.py::test[action-action_eval_cluster_table--ForceBlocks] >> test.py::test[action-eval_drop--Results] [GOOD] >> test.py::test[action-eval_for-default.txt-Results] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_bad_delay--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_star--Results] [SKIPPED] >> test.py::test[insert-replace_inferred--Results] [GOOD] >> test.py::test[insert_monotonic-non_existing_fail--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_pg[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=355434) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-result_sets] >> test.py::test[file-file_list_simple--ForceBlocks] [GOOD] >> test.py::test[file-file_list_simple--Results] >> test.py::test[aggregate-group_by_hop_zero_delay--Results] [SKIPPED] >> test.py::test[blocks-bitcast_scalar--Results] [GOOD] >> test.py::test[blocks-boolean_ops--Results] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnManyTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.331288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.331318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.331323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:20.331329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.331335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.331338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.331347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.331362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.331454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.331534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.347091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.347118Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.347218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.348724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.348757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.348780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.349395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.349430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.349498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.349526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.349817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.349980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.349986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.350022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.350026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.350033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.350050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.351345Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.389277Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.389367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.389429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.389500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.389515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.390239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.390271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.390330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.390340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.390347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.390351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.390736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.390745Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.390750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.391054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.391064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.391069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.391076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.391644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.392069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.392104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.392270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.392292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.392300Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.392355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.392361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.392388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.392398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.392751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.392759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.392798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.392804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.392813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.392819Z node 1 :FLAT_TX_SCHEMESHARD I ... 
TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-05T03:06:17.039200Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-05T03:06:17.039350Z node 263 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:17.039362Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:17.039366Z node 263 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T03:06:17.039370Z node 263 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 11 2025-05-05T03:06:17.039374Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T03:06:17.039384Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-05T03:06:17.039900Z node 263 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:06:17.039925Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-05-05T03:06:17.039930Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-05T03:06:17.039934Z node 263 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-05-05T03:06:17.040038Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-05-05T03:06:17.040055Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2025-05-05T03:06:17.040114Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000011 2025-05-05T03:06:17.040329Z node 263 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000011, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:06:17.040346Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 1129576401004 } } Step: 5000011 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:06:17.040352Z node 263 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000011, at schemeshard: 72057594046678944 
2025-05-05T03:06:17.040371Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-05-05T03:06:17.040379Z node 263 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-05T03:06:17.040383Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T03:06:17.040387Z node 263 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-05T03:06:17.040390Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T03:06:17.040397Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:06:17.040405Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T03:06:17.040410Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-05-05T03:06:17.040416Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T03:06:17.040419Z node 263 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-05-05T03:06:17.040423Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-05-05T03:06:17.040431Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T03:06:17.040436Z node 263 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-05-05T03:06:17.040440Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-05-05T03:06:17.040444Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-05-05T03:06:17.040540Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:17.040934Z node 263 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:06:17.040953Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:06:17.040981Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-05T03:06:17.041005Z node 263 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:06:17.041010Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [263:205:2207], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-05-05T03:06:17.041016Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [263:205:2207], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-05-05T03:06:17.041150Z node 263 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:17.041162Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:17.041167Z node 263 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T03:06:17.041172Z node 263 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-05-05T03:06:17.041176Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-05T03:06:17.041334Z node 263 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:17.041347Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:17.041351Z node 263 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T03:06:17.041356Z node 263 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-05T03:06:17.041360Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T03:06:17.041374Z node 263 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-05-05T03:06:17.041379Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [263:123:2149] 2025-05-05T03:06:17.041420Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:06:17.041425Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-05T03:06:17.041434Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:06:17.041862Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:17.042113Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:17.042131Z node 263 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-05-05T03:06:17.042140Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-05-05T03:06:17.042148Z node 263 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T03:06:17.042152Z node 263 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-05-05T03:06:17.042156Z node 263 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 1004, itemIdx# 4294967295 2025-05-05T03:06:17.042220Z node 263 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:06:17.042491Z node 263 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-05T03:06:17.042537Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T03:06:17.042543Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T03:06:17.042598Z node 263 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-05T03:06:17.042612Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T03:06:17.042616Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [263:982:2916] TestWaitNotification: OK eventTxId 1004 >> test.py::test[union_all-union_all_with_limits-default.txt-Results] [GOOD] >> test.py::test[view-file_outer--Results] >> test.py::test[join-join_and_distinct_key-off-ForceBlocks] |83.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[aggregate-group_by_hop_zero_delay--Results] [SKIPPED] >> test.py::test[pg-tpcds-q26-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-plan] >> test.py::test[aggregate-group_by_rollup_column_reuse--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg--Results] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--ForceBlocks] [GOOD] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Results] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[explain.script-script] >> test.py::test[file-file_list_simple--Results] [GOOD] >> test.py::test[file-where_key_in_get_file_content--ForceBlocks] >> test.py::test[insert_monotonic-non_existing_fail--ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-non_existing_fail--Results] [GOOD] >> test.py::test[join-equi_join_three_asterisk--ForceBlocks] >> test.py::test[action-eval_if_guard-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_if_guard-default.txt-Results] >> test.py::test[limit-limit_skip_take-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-limit_skip_take-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-result_sets] >> test.py::test[view-file_outer--Results] [GOOD] >> test.py::test[view-file_outer_library--Results] >> test.py::test[schema-insert-schema-ForceBlocks] [GOOD] >> test.py::test[schema-insert-schema-Results] >> test.py::test[join-premap_common_multiparents--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_multiparents--Results] >> 
test_actorsystem.py::TestWithComputeNodeWith39Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-plan] [GOOD] >> test.py::test[action-eval_if_guard-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge1-default.txt-ForceBlocks] >> test.py::test[action-eval_for-default.txt-Results] [GOOD] >> test.py::test[action-eval_for_over_subquery-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-result_sets] >> test.py::test[select-hits_count--Results] [GOOD] >> test.py::test[select-literal_bool-default.txt-Results] >> test.py::test[order_by-order_by_expr--Results] [GOOD] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Results] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Results] [GOOD] >> test.py::test[tpch-q14-default.txt-ForceBlocks] >> test.py::test[limit-limit_skip_take-default.txt-Results] [GOOD] >> test.py::test[lineage-list_literal3-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-list_literal3-default.txt-Results] >> test.py::test[lineage-list_literal3-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_all-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-topsort-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-topsort-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-combinebykey_fields_subset--ForceBlocks] >> test.py::test[sampling-map-dynamic-ForceBlocks] [GOOD] >> test.py::test[sampling-map-dynamic-Results] |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_sql.py::TestCanonicalFolder1::test_case[explain.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-plan] >> test.py::test[action-action_eval_cluster_table--ForceBlocks] [GOOD] >> test.py::test[action-action_eval_cluster_table--Results] >> test.py::test[schema-insert-schema-Results] [GOOD] >> test.py::test[schema-read_schema_other--ForceBlocks] >> test.py::test[pg-tpcds-q26-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Results] >> test.py::test[view-file_outer_library--Results] [GOOD] >> test.py::test[view-secure--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding_date_time[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=369726) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-plan] >> test.py::test[column_order-insert--ForceBlocks] [GOOD] >> test.py::test[column_order-insert--Results] >> test.py::test[blocks-boolean_ops--Results] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--Results] >> test.py::test[sampling-map-dynamic-Results] [GOOD] >> test.py::test[schema-skip_complex_type2--ForceBlocks] >> test.py::test[action-action_eval_cluster_table--Results] [GOOD] >> test.py::test[action-eval_filter--ForceBlocks] >> test.py::test[join-join_and_distinct_key-off-ForceBlocks] [GOOD] >> test.py::test[join-join_and_distinct_key-off-Results] [SKIPPED] >> test.py::test[join-join_semi_correlation_in_order_by-off-ForceBlocks] >> test.py::test[file-where_key_in_get_file_content--ForceBlocks] [GOOD] >> test.py::test[file-where_key_in_get_file_content--Results] >> test.py::test[join-equi_join_three_asterisk--ForceBlocks] [GOOD] >> test.py::test[join-equi_join_three_asterisk--Results] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-result_sets] >> test.py::test[join-premap_common_multiparents--Results] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap-off-ForceBlocks] >> test.py::test[view-secure--Results] [GOOD] >> test.py::test[view-trivial_view_concat--Results] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-result_sets] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=354935) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:05:47] send response localhost:20753/?database=local ::1 - - [05/May/2025 03:05:47] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[select-literal_bool-default.txt-Results] [GOOD] >> test.py::test[select-missing_with_nonpersist--Results] [SKIPPED] >> test.py::test[action-subquery_merge1-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-subquery_merge1-default.txt-Results] >> test.py::test[select-trivial_group_by-default.txt-Results] >> test.py::test[in-in_sorted_by_tuple--Results] >> test.py::test[column_order-insert--Results] [GOOD] >> test.py::test[column_order-insert_with_new_cols--ForceBlocks] >> test.py::test[action-eval_for_over_subquery-default.txt-Results] [GOOD] >> test.py::test[action-eval_table_with_view-default.txt-Results] >> test.py::test[pg-tpcds-q30-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q31-default.txt-Results] >> test.py::test[file-where_key_in_get_file_content--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--ForceBlocks] >> test.py::test[tpch-q14-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q14-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-plan] >> test.py::test[action-subquery_merge1-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bitor-default.txt-ForceBlocks] >> test.py::test[schema-read_schema_other--ForceBlocks] [GOOD] >> test.py::test[schema-read_schema_other--Results] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-plan] >> test.py::test[order_by-SortByTwoFieldsDesc--Results] >> test.py::test[optimizers-combinebykey_fields_subset--ForceBlocks] [GOOD] >> test.py::test[optimizers-combinebykey_fields_subset--Results] >> test.py::test[blocks-combine_all_max_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_sum--Results] >> test_actorsystem.py::TestWithComputeNodeWith2Cpu::test >> test.py::test[join-equi_join_three_asterisk--Results] [GOOD] >> test.py::test[tpch-q14-default.txt-Results] [GOOD] >> test.py::test[tpch-q8-default.txt-ForceBlocks] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-result_sets] >> test.py::test[schema-read_schema_other--Results] [GOOD] >> test.py::test[schema-user_schema_override--ForceBlocks] >> test.py::test[join-equi_join_three_asterisk-off-ForceBlocks] >> test.py::test[view-trivial_view_concat--Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Results] >> test.py::test[schema-skip_complex_type2--ForceBlocks] [GOOD] >> test.py::test[schema-skip_complex_type2--Results] >> test.py::test[action-eval_filter--ForceBlocks] [GOOD] >> test.py::test[action-eval_filter--Results] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-plan] 
[GOOD] |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] >> test.py::test[optimizers-combinebykey_fields_subset--Results] [GOOD] >> test.py::test[optimizers-keepworld_emptyflatmap--ForceBlocks] >> test.py::test[join-join_semi_correlation_in_order_by-off-ForceBlocks] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by-off-Results] [SKIPPED] >> test.py::test[join-left_cast_to_string--ForceBlocks] >> test.py::test[pg-tpcds-q31-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q34-default.txt-Results] >> test.py::test[action-eval_table_with_view-default.txt-Results] [GOOD] >> test.py::test[action-runtime_if_select-default.txt-Results] >> test.py::test[pg-tpcds-q65-default.txt-Results] >> test.py::test[produce-reduce_with_assume--Results] >> test.py::test[produce-reduce_with_assume--Results] [SKIPPED] >> test.py::test[ql_filter-integer_bounds--Results] >> test.py::test[join-premap_common_multiparents_no_premap-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap-off-Results] >> test.py::test[flatten_by-flatten_by_typed_table--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--Results] >> test.py::test[join-premap_common_multiparents_no_premap-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_with_remap--ForceBlocks] >> test.py::test[action-eval_filter--Results] [GOOD] >> test.py::test[action-eval_table_with_view-default.txt-ForceBlocks] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-result_sets] [GOOD] >> test.py::test[schema-skip_complex_type2--Results] [GOOD] >> test.py::test[select-anon_clash--ForceBlocks] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-plan] >> test.py::test[aggregate-group_by_ru_join_agg--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-Results] >> test.py::test[order_by-SortByTwoFieldsDesc--Results] [GOOD] >> test.py::test[order_by-limit--ForceBlocks] >> test_actorsystem.py::TestWithComputeNodeWith2Cpu::test [GOOD] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_udf_duo--Results] >> test.py::test[flatten_by-flatten_by_typed_table--Results] [GOOD] >> test.py::test[hor_join-merge_multiouts_all--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_all--Results] [SKIPPED] >> test.py::test[hor_join-skip_sampling--ForceBlocks] >> test.py::test[select-trivial_group_by-default.txt-Results] [GOOD] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-Results] [SKIPPED] >> test.py::test[table_range-each_with_non_existing--Results] |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-plan] [GOOD] >> test.py::test[in-in_tablesource_to_equijoin--Results] >> test.py::test[select-anon_clash--ForceBlocks] [GOOD] >> test.py::test[select-anon_clash--Results] [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt-ForceBlocks] >> test.py::test[column_order-insert_with_new_cols--ForceBlocks] [GOOD] >> test.py::test[column_order-insert_with_new_cols--Results] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-plan] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-result_sets] >> test.py::test[aggr_factory-bitor-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-bitor-default.txt-Results] >> test.py::test[pg-tpcds-q34-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q35-default.txt-Results] >> test.py::test[optimizers-keepworld_emptyflatmap--ForceBlocks] [GOOD] >> test.py::test[optimizers-keepworld_emptyflatmap--Results] >> test.py::test[weak_field-weak_field_esc_string--Results] [GOOD] >> test.py::test[window-current/aggregations--Results] >> test_actorsystem.py::TestWithComputeNodeWith30Cpu::test >> test.py::test[schema-user_schema_override--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_override--Results] >> test.py::test[blocks-combine_all_sum--Results] [GOOD] >> test.py::test[pg-tpcds-q65-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q94-default.txt-Results] >> test.py::test[blocks-combine_hashed_count_filter--Results] >> test.py::test[ql_filter-integer_bounds--Results] [GOOD] >> test.py::test[ql_filter-integer_many_left--Results] >> test.py::test[join-left_cast_to_string--ForceBlocks] [GOOD] >> test.py::test[join-left_cast_to_string--Results] >> test.py::test[join-equi_join_three_asterisk-off-ForceBlocks] [GOOD] >> test.py::test[join-equi_join_three_asterisk-off-Results] [SKIPPED] >> test.py::test[join-inner_all-off-ForceBlocks] >> test.py::test[action-runtime_if_select-default.txt-Results] [GOOD] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Results] >> test.py::test[optimizers-keepworld_emptyflatmap--Results] [GOOD] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-ForceBlocks] >> test.py::test[action-eval_table_with_view-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_table_with_view-default.txt-Results] >> test.py::test[in-in_sorted_by_tuple--Results] [GOOD] >> test.py::test[in-in_types_cast-default.txt-Results] >> test.py::test[schema-user_schema_override--Results] [GOOD] >> test.py::test[select-dict_lookup_by_key-default.txt-ForceBlocks] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith39Cpu::test [GOOD] >> test.py::test[order_by-limit--ForceBlocks] [GOOD] >> test.py::test[order_by-limit--Results] >> test.py::test[table_range-each_with_non_existing--Results] [GOOD] >> test.py::test[table_range-range_with_view--Results] >> test.py::test[column_order-insert_with_new_cols--Results] [GOOD] >> test.py::test[count-count--ForceBlocks] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-finished] [GOOD] >> test.py::test[aggr_factory-bitor-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bottom-default.txt-ForceBlocks] >> test.py::test[blocks-nested_optionals--Results] >> test.py::test[join-premap_merge_with_remap--ForceBlocks] [GOOD] >> test.py::test[tpch-q8-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_with_remap--Results] >> test.py::test[tpch-q8-default.txt-Results] >> test.py::test[hor_join-skip_sampling--ForceBlocks] [GOOD] >> test.py::test[hor_join-skip_sampling--Results] >> test.py::test[action-eval_table_with_view-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_key_column-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_udf_duo--Results] [GOOD] >> 
test.py::test[order_by-yql-19598--Results] >> test.py::test[join-left_cast_to_string--Results] [GOOD] >> test.py::test[join-left_semi_with_other--ForceBlocks] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-result_sets] >> test.py::test[pg-tpcds-q94-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q99-default.txt-Results] >> test.py::test[select-complex_filter_with_order-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-limit--Results] [GOOD] >> test.py::test[pg-join_using_tables2-default.txt-ForceBlocks] >> test.py::test[select-complex_filter_with_order-default.txt-Results] >> test.py::test[pg-tpcds-q35-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q44-default.txt-Results] >> test.py::test[ql_filter-integer_many_left--Results] [GOOD] >> test.py::test[ql_filter-integer_many_noskiff--Results] >> test.py::test[optimizers-yql-6133_skip_deps--ForceBlocks] >> test.py::test[join-inner_all-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_all-off-Results] [SKIPPED] >> test.py::test[optimizers-yql-6133_skip_deps--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-6133_skip_deps--Results] [SKIPPED] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Results] [GOOD] >> test.py::test[action-subquery_opt_args-default.txt-Results] >> test.py::test[join-join_table_conflict_fail--ForceBlocks] >> test.py::test[order_by-SortByOneField--ForceBlocks] >> test.py::test[hor_join-skip_sampling--Results] [GOOD] >> test.py::test[insert-anonymous_tables-default.txt-ForceBlocks] >> test.py::test[blocks-combine_hashed_count_filter--Results] [GOOD] >> test.py::test[blocks-date_equals--Results] >> test.py::test[in-in_types_cast-default.txt-Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted-Results] >> test.py::test[select-dict_lookup_by_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dict_lookup_by_key-default.txt-Results] >> test.py::test[select-complex_filter_with_order-default.txt-Results] [GOOD] >> test.py::test[select-exists_false-default.txt-ForceBlocks] >> test.py::test[join-premap_merge_with_remap--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--ForceBlocks] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-plan] >> test.py::test[tpch-q8-default.txt-Results] [GOOD] >> test.py::test[blocks-nested_optionals--Results] [GOOD] >> test.py::test[blocks-string_as_agg_key--Results] >> test.py::test[type_v3-ignore_v3_pragma--ForceBlocks] >> test.py::test[order_by-yql-19598--Results] [GOOD] >> test.py::test[pg-drop_table--Results] >> test.py::test[join-join_table_conflict_fail--ForceBlocks] [GOOD] >> test.py::test[join-join_table_conflict_fail--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access--ForceBlocks] >> test.py::test[pg-tpcds-q99-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q44-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q45-default.txt-Results] >> test.py::test[pg-tpch-q01-default.txt-Results] >> test.py::test[select-dict_lookup_by_key-default.txt-Results] [GOOD] >> 
test.py::test[select-dict_lookup_by_key_with_def-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_key_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_key_column-default.txt-Results] >> test.py::test[table_range-range_with_view--Results] [GOOD] >> test.py::test[table_range-table_funcs_expr--Results] >> test.py::test[ql_filter-integer_many_noskiff--Results] [GOOD] >> test.py::test[ql_filter-integer_members_eval--Results] >> test.py::test[action-subquery_opt_args-default.txt-Results] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith30Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-result_sets] >> test.py::test[join-left_semi_with_other--ForceBlocks] [GOOD] >> test.py::test[join-left_semi_with_other--Results] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-ForceBlocks] >> test.py::test[agg_apply-table--Results] >> test.py::test[pg-join_using_tables2-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-join_using_tables2-default.txt-Results] >> test.py::test[order_by-SortByOneField--ForceBlocks] [GOOD] >> test.py::test[order_by-SortByOneField--Results] >> test.py::test[window-current/aggregations--Results] [GOOD] >> test.py::test[window-current/ansi_current_with_win--Results] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-14.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-15.test] >> test.py::test[aggr_factory-bottom-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-bottom-default.txt-Results] |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-result_sets] [GOOD] >> test.py::test[aggregate-aggregate_key_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--ForceBlocks] >> test_recovery_mz.py::TestRecovery::test_recovery[v1] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--ForceBlocks] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Results] >> test.py::test[count-count--ForceBlocks] [GOOD] >> test.py::test[select-exists_false-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-exists_false-default.txt-Results] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-result_sets] [GOOD] >> test.py::test[pg-tpcds-q45-default.txt-Results] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-plan] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q48-default.txt-Results] >> test.py::test[aggregate-group_by_session_aliases--Results] >> test.py::test[count-count--Results] >> test.py::test[pg-drop_table--Results] [GOOD] >> test.py::test[pg-tpcds-q06-default.txt-Results] >> test.py::test[order_by-SortByOneField--Results] [GOOD] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-ForceBlocks] >> test.py::test[type_v3-ignore_v3_pragma--ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_pragma--Results] >> test.py::test[pg-join_using_tables2-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q04-default.txt-ForceBlocks] >> 
test.py::test[insert-append_sorted-to_sorted-Results] [GOOD] >> test.py::test[insert-insert_from_other--Results] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Results] >> test.py::test[ql_filter-integer_members_eval--Results] [GOOD] >> test.py::test[ql_filter-integer_optional--Results] >> test.py::test[select-exists_false-default.txt-Results] [GOOD] >> test.py::test[select-refselect--ForceBlocks] >> test.py::test[join-left_semi_with_other--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o--ForceBlocks] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-result_sets] >> test.py::test[insert-anonymous_tables-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-anonymous_tables-default.txt-Results] >> test.py::test[join-join_without_correlation_and_struct_access--ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access--Results] >> test_stop.py::TestStop::test_stop_query[v1-analytics] >> test.py::test[type_v3-ignore_v3_pragma--Results] [GOOD] >> test.py::test[view-secure--ForceBlocks] >> test.py::test[agg_apply-table--Results] [GOOD] >> test.py::test[aggr_factory-bitxor-default.txt-Results] >> test.py::test[table_range-table_funcs_expr--Results] [GOOD] >> test.py::test[tpch-q1-default.txt-Results] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-Results] >> test.py::test[aggr_factory-bottom-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-ForceBlocks] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Results] [GOOD] >> test.py::test[select-where_in-default.txt-ForceBlocks] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-ForceBlocks] >> test.py::test[pg-tpcds-q48-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q60-default.txt-Results] >> test.py::test[pg-tpcds-q06-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q12-default.txt-Results] >> test.py::test[blocks-string_as_agg_key--Results] [GOOD] >> test.py::test[blocks-top_sort_one_asc--Results] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Results] >> test.py::test[insert-anonymous_tables-default.txt-Results] [GOOD] >> test.py::test[insert-append_view_fail--ForceBlocks] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-result_sets] [GOOD] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-Results] >> test.py::test[view-secure--ForceBlocks] [GOOD] >> test.py::test[view-secure--Results] [GOOD] >> test.py::test[weak_field-weak_field_in_group_by--ForceBlocks] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--ForceBlocks] >> test.py::test[join-join_without_correlation_and_struct_access--Results] [GOOD] >> test.py::test[join-left_trivial--ForceBlocks] >> test.py::test[insert-insert_from_other--Results] [GOOD] >> 
test.py::test[insert-override_view_fail--Results] >> test.py::test[pg-tpcds-q04-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q04-default.txt-Results] |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-plan] [GOOD] >> test.py::test[ql_filter-integer_optional--Results] [GOOD] >> test.py::test[ql_filter-integer_select_other--Results] >> test.py::test[select-refselect--ForceBlocks] [GOOD] >> test.py::test[select-refselect--Results] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Results] [GOOD] >> test.py::test[aggregate-compare_by_tuple--ForceBlocks] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-Results] [GOOD] >> test.py::test[order_by-singular-default.txt-ForceBlocks] >> test.py::test[insert-append_view_fail--ForceBlocks] [GOOD] >> test.py::test[insert-append_view_fail--Results] [GOOD] >> test.py::test[insert-drop_sortness-desc-ForceBlocks] >> test.py::test[key_filter-split_input_with_key_filter2--Results] [SKIPPED] >> test.py::test[key_filter-utf8_with_legacy--Results] >> test.py::test[count-count--Results] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_session_aliases--Results] [GOOD] >> test.py::test[aggregate-group_by_session_distinct--Results] >> test.py::test[insert-override_view_fail--Results] [GOOD] >> test.py::test[insert-replace_inferred--Results] >> test.py::test[join-lookupjoin_semi_1o--ForceBlocks] [GOOD] >> test.py::test[in-in_tablesource_to_equijoin--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o--Results] >> test.py::test[in-yql-10038-default.txt-Results] >> test.py::test[pg-tpch-q01-default.txt-Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread--Results] [SKIPPED] >> test.py::test[produce-discard_process_with_lambda-default.txt-Results] >> test.py::test[select-refselect--Results] [GOOD] >> test.py::test[select-select_all_from_concat_anon-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q60-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-Results] >> test.py::test[pg-tpcds-q12-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q13-default.txt-Results] >> test.py::test[pg-tpcds-q04-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q13-default.txt-ForceBlocks] >> test.py::test[window-current/ansi_current_with_win--Results] [GOOD] >> test.py::test[window-current/session_incompat_sort--Results] >> test.py::test[select-where_in-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-where_in-default.txt-Results] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-ForceBlocks] [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1_parts [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-Results] [SKIPPED] >> test.py::test[join-starjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-starjoin_unused_keys--Results] [SKIPPED] >> test.py::test[json-json_query/example--ForceBlocks] >> test.py::test[weak_field-weak_field_in_group_by--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_in_group_by--Results] >> test.py::test[blocks-date_equals--Results] [GOOD] >> test.py::test[blocks-date_greater_or_equal_scalar--Results] >> test.py::test[blocks-top_sort_one_asc--Results] [GOOD] >> test.py::test[blocks-type_and_callable_stats--Results] >> test.py::test[join-lookupjoin_semi_1o--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_empty-off-ForceBlocks] |84.0%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_parts [GOOD] >> test.py::test[select-where_in-default.txt-Results] [GOOD] >> test.py::test[select-where_with_lambda--ForceBlocks] >> test.py::test[ql_filter-integer_select_other--Results] [GOOD] >> test.py::test[result_types-singular-default.txt-Results] >> test.py::test[join-left_trivial--ForceBlocks] [GOOD] >> test.py::test[join-left_trivial--Results] >> test.py::test[aggr_factory-bitxor-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-min_by-default.txt-Results] >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--Results] >> test_actorsystem.py::TestWithComputeNodeWith31Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery_mz.py::TestRecovery::test_recovery[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=361704) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[weak_field-weak_field_in_group_by--Results] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--ForceBlocks] >> test.py::test[insert-drop_sortness-desc-ForceBlocks] [GOOD] >> test.py::test[insert-drop_sortness-desc-Results] >> test_actorsystem.py::TestWithComputeNodeWith3Cpu::test >> test.py::test[pg-tpcds-q13-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q52-default.txt-Results] >> test.py::test[insert-replace_inferred--Results] [GOOD] >> test.py::test[insert-trivial_literals-default.txt-Results] >> test.py::test[produce-discard_process_with_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-process_and_filter-default.txt-Results] >> test.py::test[order_by-singular-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-singular-default.txt-Results] >> test.py::test[tpch-q1-default.txt-Results] [GOOD] >> test.py::test[tpch-q13-default.txt-Results] >> test.py::test[count-count_no_grouping-default.txt-ForceBlocks] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Results] >> test.py::test[pg-tpcds-q13-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q13-default.txt-Results] >> test.py::test[insert-drop_sortness-desc-Results] [GOOD] >> test.py::test[join-equi_join_by_expr-off-ForceBlocks] >> test.py::test[select-select_all_from_concat_anon-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_all_from_concat_anon-default.txt-Results] >> test.py::test[join-left_trivial--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o-off-ForceBlocks] >> test.py::test[pg-tpcds-q70-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q78-default.txt-Results] >> test.py::test[count-count_no_grouping-default.txt-Results] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-ForceBlocks] >> test.py::test[order_by-singular-default.txt-Results] [GOOD] >> test.py::test[pg-aggregate_combine--ForceBlocks] >> test.py::test[key_filter-utf8_with_legacy--Results] [GOOD] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt-Results] >> 
test.py::test[pg-tpcds-q13-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q35-default.txt-ForceBlocks] >> test.py::test[result_types-singular-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_topsort-default.txt-Results] >> test.py::test[pg-tpcds-q96-default.txt-ForceBlocks] >> test.py::test[aggregate-compare_by_tuple--ForceBlocks] [GOOD] >> test.py::test[aggregate-compare_by_tuple--Results] >> test.py::test[join-lookupjoin_semi_empty-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_empty-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite_star-off-ForceBlocks] >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--Results] [GOOD] >> test.py::test[order_by-SortByTwoFieldsDesc--ForceBlocks] >> test.py::test[select-where_with_lambda--ForceBlocks] [GOOD] >> test.py::test[select-where_with_lambda--Results] >> test.py::test[select-select_all_from_concat_anon-default.txt-Results] [GOOD] >> test.py::test[select-select_all_ordered-default.txt-ForceBlocks] >> test.py::test[in-yql-10038-default.txt-Results] [GOOD] >> test.py::test[insert-append_proto_fail--Results] >> test.py::test[blocks-type_and_callable_stats--Results] [GOOD] >> test.py::test[coalesce-coalesce_sugar-default.txt-Results] >> test.py::test[pg-tpcds-q52-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q56-default.txt-Results] >> test_actorsystem.py::TestWithComputeNodeWith3Cpu::test [GOOD] >> test.py::test[insert-trivial_literals-default.txt-Results] [GOOD] >> test.py::test[insert-use_anon_table_without_fill_fail--Results] >> test.py::test[json-json_query/example--ForceBlocks] [GOOD] >> test.py::test[json-json_query/example--Results] >> test.py::test[produce-process_and_filter-default.txt-Results] [GOOD] >> test.py::test[produce-process_row_and_columns-default.txt-Results] >> test.py::test[select-where_with_lambda--Results] [GOOD] >> test.py::test[table_range-range_over_filter_udf--ForceBlocks] >> test.py::test[pg-tpcds-q78-default.txt-Results] [GOOD] >> test.py::test[insert-append_proto_fail--Results] [GOOD] >> test.py::test[insert-drop_sortness--Results] >> test.py::test[pg-tpcds-q85-default.txt-Results] >> test.py::test[window-current/session_incompat_sort--Results] [GOOD] >> test.py::test[window-full/aggregations--Results] >> test.py::test[insert-use_anon_table_without_fill_fail--Results] [GOOD] >> test.py::test[join-anyjoin_merge_nodup-off-Results] [SKIPPED] >> test.py::test[join-cbo_4tables_only_sorted_merge--Results] [SKIPPED] >> test.py::test[join-grace_join1--Results] [SKIPPED] >> test.py::test[join-inner_all_right--Results] >> test.py::test[join-equi_join_by_expr-off-ForceBlocks] [GOOD] >> test.py::test[join-equi_join_by_expr-off-Results] [SKIPPED] >> test.py::test[join-flatten_columns2-off-ForceBlocks] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-Results] >> test.py::test[weak_field-optimize_weak_fields_map_combine--Results] >> test.py::test[pg-tpcds-q96-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q96-default.txt-Results] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt-Results] [GOOD] >> test.py::test[limit-dynamic_limit--Results] [SKIPPED] >> test.py::test[limit-limit_over_sort_desc_in_subquery--Results] [SKIPPED] >> test.py::test[lineage-reduce-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_group_by_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_union_all-default.txt-Results] [SKIPPED] >> 
test.py::test[multicluster-local_tc_with_force-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-test_no_aggregate_split--Results] >> test.py::test[join-lookupjoin_inner_1o-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_1o-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_sharded-default.txt-ForceBlocks] >> test.py::test[window-empty/aggregations_leadlag--ForceBlocks] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Results] >> test.py::test[aggr_factory-min_by-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-Results] >> test.py::test[json-json_query/example--Results] [GOOD] >> test.py::test[key_filter-empty_range_over_dynamic--ForceBlocks] >> test.py::test[aggregate-percentiles_ungrouped--Results] >> test.py::test[pg-tpcds-q35-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q35-default.txt-Results] >> test.py::test[pg-aggregate_combine--ForceBlocks] [GOOD] >> test.py::test[pg-aggregate_combine--Results] >> test.py::test[order_by-SortByTwoFieldsDesc--ForceBlocks] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-ForceBlocks] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-Results] >> test.py::test[aggregate-compare_by_tuple--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_grouping--ForceBlocks] >> test.py::test[sampling-bind_topsort-default.txt-Results] [GOOD] >> test.py::test[sampling-direct_read-dynamic-Results] >> test.py::test[coalesce-coalesce_sugar-default.txt-Results] [GOOD] >> test.py::test[column_group-hint_anon-single-Results] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_non_yson_fail--Results] [SKIPPED] >> test.py::test[column_group-length-single-Results] [SKIPPED] >> test.py::test[column_group-respull--Results] [SKIPPED] >> test.py::test[column_order-select_distinct_star-default.txt-Results] >> test.py::test[select-select_all_ordered-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_all_ordered-default.txt-Results] >> test.py::test[aggregate-group_by_session_distinct--Results] [GOOD] >> test.py::test[aggregate-group_by_session_only--Results] >> test.py::test[pg-tpcds-q56-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q61-default.txt-Results] >> test.py::test[pg-tpcds-q96-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q03-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_early_rewrite_star-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-ForceBlocks] |84.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[order_by-SortByTwoFieldsDesc--ForceBlocks] [GOOD] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnManyTables [GOOD] >> TExportToS3WithRebootsTests::CancelOnSingleShardTableWithChangefeed >> test.py::test[pg-tpcds-q35-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q39-default.txt-ForceBlocks] >> test.py::test[insert-drop_sortness--Results] [GOOD] >> test.py::test[insert-insert_relabeled-default.txt-Results] >> test.py::test[produce-process_row_and_columns-default.txt-Results] [GOOD] >> test.py::test[produce-process_streaming_count-default.txt-Results] >> test.py::test[select-select_all_ordered-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-ForceBlocks] >> 
test.py::test[tpch-q13-default.txt-Results] [GOOD] >> test.py::test[tpch-q20-default.txt-Results] >> test_actorsystem.py::TestWithComputeNodeWith31Cpu::test [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q20-default.txt-Results] >> test.py::test[pg-aggregate_combine--Results] [GOOD] >> test.py::test[pg-select_starref1-default.txt-ForceBlocks] >> test.py::test[table_range-range_over_filter_udf--ForceBlocks] [GOOD] >> test.py::test[table_range-range_over_filter_udf--Results] >> test.py::test[count-count_nullable_sub-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-ForceBlocks] >> test.py::test[window-empty/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt-ForceBlocks] >> test.py::test[table_range-range_over_filter_udf--Results] [GOOD] >> test.py::test[table_range-table_funcs_expr--ForceBlocks] >> test.py::test[pg-tpcds-q61-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q75-default.txt-Results] >> test.py::test[join-mapjoin_sharded-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_sharded-default.txt-Results] >> test.py::test[key_filter-empty_range_over_dynamic--ForceBlocks] [GOOD] >> test.py::test[key_filter-empty_range_over_dynamic--Results] >> test.py::test[join-flatten_columns2-off-ForceBlocks] [GOOD] >> test.py::test[join-flatten_columns2-off-Results] [SKIPPED] >> test.py::test[join-inner_all_right--Results] [GOOD] >> test.py::test[join-inner_with_select--Results] >> test.py::test[aggregate-percentiles_ungrouped--Results] [GOOD] >> test.py::test[aggregate-subquery_aggregation--ForceBlocks] >> test.py::test[sampling-direct_read-dynamic-Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map_combine--Results] [GOOD] >> test.py::test[weak_field-weak_field_opt--Results] >> test.py::test[sampling-read-dynamic-Results] |84.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[join-flatten_columns2-off-Results] [SKIPPED] >> test.py::test[pg-tpcds-q39-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q39-default.txt-Results] >> test.py::test[insert-insert_relabeled-default.txt-Results] [GOOD] >> test.py::test[insert_monotonic-keep_meta-default.txt-Results] >> test.py::test[aggregate-group_by_gs_grouping--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_grouping--Results] >> test.py::test[aggregate-group_by_session_only--Results] [GOOD] >> test.py::test[aggregate-having_distinct_expr--Results] >> test.py::test[key_filter-empty_range_over_dynamic--Results] [GOOD] >> test.py::test[key_filter-tzdate--ForceBlocks] >> test.py::test[pg-select_starref1-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_starref1-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-ForceBlocks] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-15.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-2.test] >> test.py::test[produce-process_streaming_count-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf-default.txt-Results] >> test.py::test[column_order-select_distinct_star-default.txt-Results] [GOOD] >> test.py::test[column_order-select_where-default.txt-Results] >> test.py::test[join-mapjoin_sharded-default.txt-Results] [GOOD] >> test.py::test[join-mapjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_unused_keys--Results] [SKIPPED] 
>> test.py::test[pg-tpch-q03-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q03-default.txt-Results] >> test.py::test[distinct-distinct_having_no_agg-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Results] >> test.py::test[pg-tpcds-q39-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q75-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_small_primary--ForceBlocks] |84.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_starref1-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q12-default.txt-ForceBlocks] |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-result_sets] [GOOD] >> test.py::test[pg-tpcds-q75-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q76-default.txt-Results] >> test_actorsystem.py::TestWithComputeNodeWith4Cpu::test >> test.py::test[optimizers-test_no_aggregate_split--Results] [GOOD] >> test.py::test[optimizers-yql-14279_keyextract_with_world_dep--Results] >> test.py::test[optimizers-yql-14279_keyextract_with_world_dep--Results] [SKIPPED] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Results] |84.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[join-mapjoin_unused_keys--Results] [SKIPPED] >> test.py::test[aggregate-group_by_gs_grouping--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_bad_interval--ForceBlocks] >> test.py::test[join-left_only_with_other-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_csee-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_bug8533--Results] >> test.py::test[blocks-date_greater_or_equal_scalar--Results] [GOOD] >> test.py::test[blocks-interval_add_date--Results] >> test_generator.py::TestTpcdsGenerator::test_s1 [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_star-default.txt-ForceBlocks] >> test.py::test[window-row_number_to_map_multiple-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt-Results] >> test.py::test[sampling-read-dynamic-Results] [GOOD] >> test.py::test[sampling-subquery_multiple_sample-default.txt-Results] >> test.py::test[insert_monotonic-keep_meta-default.txt-Results] [GOOD] >> test.py::test[insert_monotonic-several1-default.txt-Results] >> test.py::test[weak_field-weak_field_opt--Results] [GOOD] >> test.py::test[weak_field-weak_field_wrong_types_fail--Results] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables [GOOD] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--ForceBlocks] >> test.py::test[aggregate-subquery_aggregation--ForceBlocks] [GOOD] >> test.py::test[aggregate-subquery_aggregation--Results] >> test.py::test[tpch-q20-default.txt-Results] [GOOD] >> test.py::test[tpch-q21-default.txt-Results] |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1 [GOOD] >> test.py::test[join-inner_with_select--Results] [GOOD] >> test.py::test[join-inner_with_select-off-Results] [SKIPPED] >> test.py::test[join-join_cbo_3_tables--Results] >> test.py::test[pg-tpch-q03-default.txt-Results] [GOOD] >> test.py::test[pg_duplicated-duplicated_rowspec--ForceBlocks] >> test_row_dispatcher.py::TestPqRowDispatcher::test_restart_compute_node [GOOD] >> test.py::test[aggregate-group_by_hop_bad_interval--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_bad_interval--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--ForceBlocks] >> test.py::test[produce-process_with_udf-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_by_struct-default.txt-Results] >> test.py::test[column_order-select_where-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_table_sort_desc--Results] >> test.py::test[table_range-table_funcs_expr--ForceBlocks] [GOOD] >> test.py::test[table_range-table_funcs_expr--Results] >> test.py::test[join-mergejoin_small_primary--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_small_primary--Results] >> test.py::test[window-full/aggregations--Results] [GOOD] >> test.py::test[window-full/aggregations_leadlag_compact--Results] >> test.py::test[pg-tpcds-q76-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q90-default.txt-Results] >> test.py::test[pg-tpcds-q12-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q12-default.txt-Results] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--Results] >> test.py::test[weak_field-weak_field_wrong_types_fail--Results] [GOOD] >> test.py::test[window-current/aggregations_leadlag--Results] >> test_actorsystem.py::TestWithComputeNodeWith4Cpu::test [GOOD] >> test.py::test[pg-tpcds-q75-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q75-default.txt-Results] >> test.py::test[window-row_number_to_map_multiple-default.txt-Results] [GOOD] >> test.py::test[window-udaf_window--ForceBlocks] >> test.py::test[aggregate-subquery_aggregation--Results] [GOOD] >> test.py::test[binding-table_from_binding-default.txt-ForceBlocks] >> test.py::test[aggregate-having_distinct_expr--Results] [GOOD] >> test.py::test[aggregate-native_desc_group_compact_by--Results] >> test.py::test[select-logical_ops-default.txt-Results] >> test.py::test[pg-tpcds-q12-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q23-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q75-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-ForceBlocks] >> test.py::test[insert_monotonic-several1-default.txt-Results] [GOOD] >> test.py::test[join-bush_dis_in-off-Results] [SKIPPED] >> test.py::test[join-count_bans-off-Results] [SKIPPED] >> test.py::test[join-equi_join_three_asterisk_eval--Results] >> test.py::test[table_range-table_funcs_expr--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-opt-ForceBlocks] >> test.py::test[join-mergejoin_small_primary--Results] [GOOD] >> test.py::test[join-mergejoin_small_primary-off-ForceBlocks] >> test.py::test[join-lookupjoin_bug8533--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_2o--Results] >> test.py::test[pg_duplicated-duplicated_rowspec--ForceBlocks] [GOOD] >> test.py::test[pg_duplicated-duplicated_rowspec--Results] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Results] [GOOD] >> 
test.py::test[optimizers-yql-7767_key_filter_with_view--Results] |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith31Cpu::test [GOOD] >> test.py::test[sampling-subquery_multiple_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-table_content--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.444919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.444942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.444946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:20.444951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.444956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.444960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.444968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.444980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.445071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.445130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.456627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" 
AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.456643Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.456710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.460491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.460521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.460544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.462127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.462160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.462264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.462300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.462653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.462852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.462861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.462903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.462910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.462916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.462942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.464086Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.480789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.480851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.480898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.480986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.480997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.481481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.481503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.481544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.481565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.481570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.481574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.481898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.481907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.481911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.482171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.482180Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.482184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.482189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.482696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.482997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.483025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.483200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.483221Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.483228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.483281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.483287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.483314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.483325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.483618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.483624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.483650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.483653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.483660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.483665Z node 1 :FLAT_TX_SCHEMESHARD I ... 
046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:52.301940Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:52.301944Z node 342 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T03:06:52.301948Z node 342 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 11 2025-05-05T03:06:52.301951Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T03:06:52.301961Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-05T03:06:52.302006Z node 342 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:06:52.302062Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-05-05T03:06:52.302084Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2025-05-05T03:06:52.302110Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-05-05T03:06:52.302113Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-05T03:06:52.302118Z node 342 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000011 2025-05-05T03:06:52.302164Z node 342 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000011, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:06:52.302178Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 1468878817389 } } Step: 5000011 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:06:52.302184Z node 342 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000011, at schemeshard: 72057594046678944 2025-05-05T03:06:52.302216Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-05-05T03:06:52.302224Z node 342 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-05T03:06:52.302228Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T03:06:52.302232Z node 342 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-05T03:06:52.302235Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 
281474976710763 ready parts: 1/1 2025-05-05T03:06:52.302242Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:06:52.302249Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T03:06:52.302254Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-05-05T03:06:52.302260Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T03:06:52.302264Z node 342 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-05-05T03:06:52.302267Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-05-05T03:06:52.302274Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T03:06:52.302279Z node 342 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-05-05T03:06:52.302282Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-05-05T03:06:52.302285Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-05-05T03:06:52.302760Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:52.302831Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-05-05T03:06:52.302838Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409548 2025-05-05T03:06:52.302854Z node 342 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:06:52.303125Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:52.303171Z node 342 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:06:52.303176Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:06:52.303199Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-05T03:06:52.303219Z node 342 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:06:52.303224Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [342:205:2207], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-05-05T03:06:52.303229Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [342:205:2207], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-05-05T03:06:52.303342Z node 342 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:52.303351Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:52.303355Z node 342 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T03:06:52.303359Z node 342 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-05-05T03:06:52.303363Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-05T03:06:52.303461Z node 342 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:52.303469Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:52.303473Z node 342 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T03:06:52.303477Z node 342 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-05T03:06:52.303481Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T03:06:52.303490Z node 342 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-05-05T03:06:52.303497Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [342:123:2149] 2025-05-05T03:06:52.303540Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:06:52.303545Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-05T03:06:52.303552Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:06:52.303884Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:52.304149Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T03:06:52.304165Z node 342 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-05-05T03:06:52.304174Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-05-05T03:06:52.304182Z node 342 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T03:06:52.304185Z node 342 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-05-05T03:06:52.304190Z node 342 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 1004, itemIdx# 4294967295 2025-05-05T03:06:52.304231Z node 342 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:06:52.304491Z node 342 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-05T03:06:52.304532Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T03:06:52.304539Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T03:06:52.304592Z node 342 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-05T03:06:52.304605Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T03:06:52.304610Z node 342 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [342:978:2912] TestWaitNotification: OK eventTxId 1004 >> test.py::test[key_filter-tzdate--ForceBlocks] [GOOD] >> test.py::test[key_filter-tzdate--Results] >> test.py::test[aggregate-group_by_ru_join--Results] >> test.py::test[aggregate-aggregation_with_named_node--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--Results] >> test.py::test[pg-tpcds-q90-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-Results] >> test.py::test[column_group-hint_anon-single-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_anon-single-Results] [SKIPPED] >> test.py::test[column_order-select_plain_nosimple-default.txt-ForceBlocks] >> test.py::test[distinct-distinct_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_star-default.txt-Results] >> test.py::test[pg_duplicated-duplicated_rowspec--Results] [GOOD] >> test.py::test[produce-process_multi_in_trivial_lambda--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_multi_in_trivial_lambda--Results] [SKIPPED] >> test.py::test[produce-reduce_all_with_python_input_stream-dq_fail-ForceBlocks] >> test.py::test[datetime-date_tz_table_sort_desc--Results] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-Results] >> test.py::test[blocks-interval_add_date--Results] [GOOD] >> test.py::test[produce-reduce_by_struct-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in-sorted-Results] >> test.py::test[select-logical_ops-default.txt-Results] [GOOD] >> test.py::test[select-match_clause--Results] >> test.py::test[binding-table_from_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-complex-default.txt-Results] >> test.py::test[aggregate-aggregation_with_named_node--Results] [GOOD] >> test.py::test[aggregate-avg_interval-default.txt-Results] >> test.py::test[binding-table_from_binding-default.txt-Results] >> test.py::test[aggregate-aggregation_with_named_node--Results] [GOOD] >> test.py::test[aggregate-compare_by--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] >> test_actorsystem.py::TestWithComputeNodeWith5Cpu::test |84.0%| [TM] 
{default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[blocks-interval_add_date--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross--Results] >> test.py::test[window-udaf_window--ForceBlocks] [GOOD] >> test.py::test[window-udaf_window--Results] >> test.py::test[pg-tpcds-q93-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-Results] >> test.py::test[produce-reduce_all_with_python_input_stream-dq_fail-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_all_with_python_input_stream-dq_fail-Results] >> test.py::test[produce-reduce_all_with_python_input_stream-dq_fail-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--ForceBlocks] >> test.py::test[type_v3-ignore_v3_hint-opt-ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-opt-Results] >> test.py::test[binding-table_from_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_int32--ForceBlocks] >> test.py::test[key_filter-tzdate--Results] [GOOD] >> test.py::test[key_filter-yql-14157--ForceBlocks] >> test.py::test[pg-tpcds-q23-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q23-default.txt-Results] >> test.py::test[join-mergejoin_small_primary-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_small_primary-off-Results] [SKIPPED] >> test.py::test[join-opt_on_opt_side--ForceBlocks] >> test.py::test[pg-tpcds-q93-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q97-default.txt-Results] >> test.py::test[pg-tpch-q20-default.txt-Results] [GOOD] >> test.py::test[pg-wide_top_sort--Results] >> test.py::test[pg-tpcds-q93-default.txt-Results] [GOOD] >> test.py::test[produce-process_sorted_desc_multi_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_sorted_desc_multi_out--Results] [SKIPPED] >> test.py::test[produce-process_streaming-default.txt-ForceBlocks] >> test.py::test[tpch-q21-default.txt-Results] [GOOD] >> test.py::test[tpch-q6-default.txt-Results] >> test.py::test[join-lookupjoin_semi_2o--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star--Results] >> test.py::test[sampling-table_content--Results] [GOOD] >> test.py::test[schema-copy-other-Results] >> test.py::test[distinct-distinct_star-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-7767_key_filter_with_view--Results] [GOOD] >> test.py::test[dq-precompute_result-default.txt-ForceBlocks] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Results] >> test.py::test[type_v3-ignore_v3_hint-opt-Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_key_check--ForceBlocks] >> test.py::test[window-udaf_window--Results] [GOOD] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q23-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q34-default.txt-ForceBlocks] >> test.py::test[select-match_clause--Results] [GOOD] >> test.py::test[select-swap_columns-default.txt-Results] >> test_actorsystem.py::TestWithComputeNodeWith5Cpu::test [GOOD] >> test.py::test[join-join_cbo_3_tables--Results] [GOOD] >> test.py::test[join-join_comp_common_table--Results] >> test.py::test[aggregate-group_by_ru_join--Results] [GOOD] >> test.py::test[aggregate-group_by_session_star--ForceBlocks] >> test.py::test[window-current/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-current/ansi_current--Results] >> 
test.py::test[column_order-select_plain_nosimple-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_plain_nosimple-default.txt-Results] |84.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[type_v3-ignore_v3_hint-opt-Results] [GOOD] >> test.py::test[blocks-struct_type--Results] >> test.py::test[join-mapjoin_with_empty_read--ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_read--Results] [SKIPPED] >> test.py::test[join-mergejoin_big_primary--ForceBlocks] >> test.py::test[window-full/aggregations_leadlag_compact--Results] [GOOD] >> test.py::test[window-full/session_incompat_sort--Results] >> test.py::test[blocks-add_int32--ForceBlocks] [GOOD] >> test.py::test[blocks-add_int32--Results] >> test_actorsystem.py::TestWithComputeNodeWith6Cpu::test >> test.py::test[pg-tpcds-q97-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in-sorted-Results] [GOOD] >> test.py::test[produce-reduce_typeinfo--Results] [SKIPPED] >> test.py::test[produce-reduce_with_presort_diff_order--Results] [SKIPPED] >> test.py::test[sampling-bind_expr_udf--Results] >> test.py::test[aggregate-avg_interval-default.txt-Results] [GOOD] >> test.py::test[aggregate-avg_with_having-default.txt-Results] >> test.py::test[key_filter-complex-default.txt-Results] [GOOD] >> test.py::test[key_filter-dependent_value-default.txt-Results] >> test.py::test[distinct-distinct_one_count-default.txt-Results] [GOOD] >> test.py::test[dq-pool_trees_whitelist--Results] [SKIPPED] >> test.py::test[dq-wrong_script_timeout-default.txt-Results] [SKIPPED] >> test.py::test[epochs-use_and_drop_anonymous--Results] >> test.py::test[join-equi_join_three_asterisk_eval--Results] [GOOD] >> test.py::test[join-equi_join_three_simple--Results] >> test.py::test[join-opt_on_opt_side--ForceBlocks] [GOOD] >> test.py::test[epochs-use_and_drop_anonymous--Results] [SKIPPED] >> test.py::test[expr-constraints_of--Results] >> test.py::test[tpch-q6-default.txt-Results] [GOOD] >> test.py::test[type_v3-bare_yson--Results] [SKIPPED] >> test.py::test[type_v3-mergejoin_with_sort--Results] >> test.py::test[key_filter-yql-14157--ForceBlocks] [GOOD] >> test.py::test[key_filter-yql-14157--Results] >> test.py::test[aggregate-native_desc_group_compact_by--Results] [GOOD] >> test.py::test[aggregate-table_row_aggregation-default.txt-Results] >> test.py::test[column_order-select_plain_nosimple-default.txt-Results] [GOOD] >> test.py::test[column_order-select_subquery-default.txt-ForceBlocks] >> test.py::test[schema-copy-other-Results] [GOOD] >> test.py::test[schema-diffrerent_schemas--Results] >> test.py::test[blocks-add_int32--Results] [GOOD] >> test.py::test[blocks-bitcast_scalar--ForceBlocks] >> test_row_dispatcher.py::TestPqRowDispatcher::test_scheme_error >> test.py::test[pg-wide_top_sort--Results] [GOOD] >> test.py::test[pragma-config_exec--Results] >> test.py::test[join-mergejoin_saves_output_sort_cross--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross-off-Results] >> test.py::test[like-regexp_clause--Results] |84.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[pg-tpcds-q97-default.txt-Results] [GOOD] >> test.py::test[produce-process_streaming-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_streaming-default.txt-Results] >> test.py::test[join-mergejoin_saves_output_sort_cross-off-Results] [SKIPPED] >> 
test.py::test[join-mergejoin_saves_output_sort_nested--Results] >> test.py::test[pg-tpcds-q34-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q34-default.txt-Results] >> test.py::test[select-swap_columns-default.txt-Results] [GOOD] >> test.py::test[select-unlabeled_1000--Results] >> test.py::test[produce-reduce_multi_in_keytuple--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--Results] >> test.py::test[dq-precompute_result-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-precompute_result-default.txt-Results] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_having_some_fail--ForceBlocks] |84.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[join-opt_on_opt_side--ForceBlocks] [GOOD] >> test.py::test[aggregate-compare_by--ForceBlocks] [GOOD] >> test.py::test[aggregate-compare_by--Results] >> test.py::test[join-mapjoin_early_rewrite_star--Results] [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_no_sorted-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort-off-Results] [SKIPPED] >> test.py::test[join-no_empty_join_for_dyn--Results] [SKIPPED] >> test.py::test[join-nopushdown_filter_over_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_common_inner_both_sides--Results] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Results] >> test.py::test[join-mergejoin_big_primary--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_big_primary--Results] >> test.py::test[pg-tpcds-q34-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q38-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_rollup_key_check--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_key_check--Results] >> test_actorsystem.py::TestWithComputeNodeWith6Cpu::test [GOOD] >> test.py::test[key_filter-yql-14157--Results] [GOOD] >> test.py::test[expr-constraints_of--Results] [GOOD] >> test.py::test[produce-process_streaming-default.txt-Results] [GOOD] >> test.py::test[expr-yql-10180-default.txt-Results] >> test.py::test[expr-non_persistable_group_by_having_some_fail--ForceBlocks] [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1_state [GOOD] >> test.py::test[key_filter-dependent_value-default.txt-Results] [GOOD] >> test.py::test[limit-sort_calc_limit--Results] >> test.py::test[produce-reduce_multi_in_keytuple--Results] [GOOD] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-ForceBlocks] >> test.py::test[blocks-struct_type--Results] [GOOD] >> test.py::test[case-case_then_else-default.txt-Results] |84.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[key_filter-yql-14157--Results] [GOOD] >> test.py::test[sampling-bind_expr_udf--Results] [GOOD] >> test.py::test[sampling-bind_small_rate-default.txt-Results] |84.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[expr-non_persistable_group_by_having_some_fail--ForceBlocks] [GOOD] >> test.py::test[schema-diffrerent_schemas--Results] [GOOD] >> test.py::test[schema-read_schema_change_other--Results] >> test.py::test[column_order-select_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_subquery-default.txt-Results] |84.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} 
ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[produce-process_streaming-default.txt-Results] [GOOD] |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_state [GOOD] >> test.py::test[blocks-bitcast_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-bitcast_scalar--Results] >> test.py::test[join-mergejoin_big_primary--Results] [GOOD] >> test.py::test[join-mergejoin_force_align1-off-ForceBlocks] >> test.py::test[like-regexp_clause--Results] [GOOD] >> test.py::test[limit-dynamic_sort_limit--Results] [SKIPPED] >> test.py::test[limit-empty_input_after_limit-default.txt-Results] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Results] [GOOD] >> test.py::test[order_by-changed_sort_with_limit--Results] [SKIPPED] >> test.py::test[order_by-literal_complex--Results] [SKIPPED] >> test.py::test[order_by-sort--Results] >> test.py::test[aggregate-group_by_session_star--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_star--Results] >> test.py::test[pragma-config_exec--Results] [GOOD] >> test.py::test[produce-process_multi_in_trivial_lambda--Results] [SKIPPED] >> test.py::test[produce-process_pure_with_sort-default.txt-Results] >> test.py::test[select-unlabeled_1000--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt-Results] >> test.py::test[aggregate-avg_with_having-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping--Results] >> test.py::test[flatten_by-flatten_columns-default.txt-Results] >> test.py::test[window-current/ansi_current--Results] [GOOD] >> test.py::test[window-distinct_over_window_full_frames--Results] >> test.py::test[aggregate-compare_by--Results] [GOOD] >> test.py::test[aggregate-group_by_column-default.txt-ForceBlocks] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Results] [GOOD] >> test.py::test[column_order-select_subquery-default.txt-Results] [GOOD] >> test.py::test[count-count_all_grouped-empty-ForceBlocks] >> test.py::test[schema-read_schema_change_other--Results] [GOOD] >> test.py::test[schema-row_spec_with_default_values--Results] >> test.py::test[aggregate-group_by_rollup_key_check--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join--ForceBlocks] >> test.py::test[type_v3-mergejoin_with_sort--Results] [GOOD] >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] >> test.py::test[blocks-bitcast_scalar--Results] [GOOD] >> test.py::test[blocks-combine_all_avg_filter--ForceBlocks] >> test.py::test[aggregate-table_row_aggregation-default.txt-Results] [GOOD] >> test.py::test[ansi_idents-basic_columns-default.txt-Results] >> test.py::test[pg-tpcds-q38-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q38-default.txt-Results] >> test.py::test[expr-yql-10180-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_few_fields--Results] |84.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested--Results] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner--Results] >> test.py::test[case-case_then_else-default.txt-Results] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] >> test.py::test[sampling-bind_small_rate-default.txt-Results] [GOOD] >> test.py::test[sampling-map--Results] >> 
test.py::test[join-equi_join_three_simple--Results] [GOOD] >> test.py::test[join-full_equal_not_null-off-Results] [SKIPPED] >> test.py::test[join-full_join--Results] >> test.py::test[aggregate-group_by_session_star--Results] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-ForceBlocks] >> test.py::test[join-opt_on_opt_side--Results] >> test.py::test[pg-tpcds-q38-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q49-default.txt-ForceBlocks] >> test.py::test[join-premap_common_inner_both_sides--Results] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap--Results] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Results] >> test.py::test[aggregate-group_by_ru_join_simple--Results] >> test.py::test[limit-sort_calc_limit--Results] [GOOD] >> test.py::test[lineage-reduce_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-reduce_all_row-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-insert_fill--Results] [SKIPPED] >> test.py::test[multicluster-remote_tc_with_auto-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-combinebykey_fields_subset_range--Results] |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith6Cpu::test [GOOD] >> test.py::test[order_by-sort--Results] [GOOD] >> test.py::test[pg-in_mixed--Results] >> test.py::test[flatten_by-flatten_columns-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_with_join--Results] >> test.py::test[schema-row_spec_with_default_values--Results] [GOOD] >> test.py::test[schema-user_schema_bind-default.txt-Results] >> test.py::test[count-count_all_grouped-empty-ForceBlocks] [GOOD] >> test.py::test[count-count_all_grouped-empty-Results] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] [GOOD] >> test.py::test[ql_filter-integer_optional_null--ForceBlocks] >> test.py::test[aggregate-group_by_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_column-default.txt-Results] >> test.py::test[produce-process_pure_with_sort-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_python_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_all-default.txt-Results] >> test.py::test[limit-empty_input_after_limit-default.txt-Results] [GOOD] >> test.py::test[lineage-flatten_by--Results] >> test.py::test[window-full/session_incompat_sort--Results] [GOOD] >> test.py::test[window-lagging/aggregations--Results] >> test.py::test[ansi_idents-basic_columns-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_native-default-Results] [SKIPPED] >> test.py::test[binding-table_filter_strict_binding-default.txt-Results] >> test.py::test[aggregate-group_by_ru_join--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_align1-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_align1-off-Results] [SKIPPED] >> test.py::test[join-pullup_left--ForceBlocks] >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] [GOOD] >> test.py::test[blocks-combine_all_avg_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_avg_filter--Results] >> test.py::test[join-mergejoin_force_one_sorted--Results] >> test.py::test[column_group-hint_non_lst_yson_fail--Results] [SKIPPED] >> 
test.py::test[column_group-many_inserts--Results] [SKIPPED] >> test.py::test[column_order-insert_tmp-default.txt-Results] >> test.py::test[count-count_all_grouped-empty-Results] [GOOD] >> test.py::test[dq-precompute_parallel_indep--ForceBlocks] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Results] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] [GOOD] >> test.py::test[column_group-groups-single-Results] [SKIPPED] >> test.py::test[column_group-hint-single-Results] [SKIPPED] >> test.py::test[column_group-hint_append--Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail3--Results] [SKIPPED] >> test.py::test[column_group-hint_non_map_yson_fail--Results] [SKIPPED] >> test.py::test[column_order-join--Results] [SKIPPED] >> test.py::test[column_order-winfunc-default.txt-Results] >> test.py::test[action-evaluate_match_type-default.txt-Results] |84.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[aggregate-group_by_ru_join--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_static-default.txt-ForceBlocks] >> test.py::test[sampling-map--Results] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-Results] >> test.py::test[sampling-reduce_with_presort--Results] [SKIPPED] >> test.py::test[sampling-sample-default.txt-Results] |84.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner--Results] [GOOD] >> test.py::test[join-mergejoin_sorts_output_for_sort_left--Results] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_nomatch--Results] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_right--Results] [SKIPPED] >> test.py::test[join-order_of_qualified--Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-Results] >> test.py::test[blocks-combine_all_avg_filter--Results] [GOOD] >> test.py::test[blocks-complex_scalars--ForceBlocks] >> test.py::test[pg-in_mixed--Results] [GOOD] >> test.py::test[pg-select_from_columns_qstar-default.txt-Results] >> test.py::test[schema-user_schema_bind-default.txt-Results] [GOOD] >> test.py::test[schema-user_schema_existing_column--Results] >> test.py::test[pg-tpcds-q49-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q49-default.txt-Results] >> test.py::test[optimizers-combinebykey_fields_subset_range--Results] [GOOD] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] >> test.py::test[ansi_idents-order_by-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_count_filter--ForceBlocks] >> test.py::test[join-opt_on_opt_side--Results] [GOOD] >> test.py::test[binding-table_filter_strict_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint64_opt--Results] >> test.py::test[join-premap_common_inner--Results] >> test.py::test[join-full_join--Results] [GOOD] >> test.py::test[join-inner_all-off-Results] [SKIPPED] >> test.py::test[join-inner_grouped-off-Results] >> test.py::test[flatten_by-flatten_few_fields--Results] [GOOD] >> test.py::test[flatten_by-flatten_list_on_flatten_by--Results] >> 
test.py::test[join-inner_grouped-off-Results] [SKIPPED] >> test.py::test[join-join_left_cbo--Results] >> test.py::test[tpch-q8-default.txt-Results] >> test.py::test[produce-reduce_all-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_multi_in-default.txt-Results] >> test.py::test[ql_filter-integer_optional_null--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_optional_null--Results] >> test.py::test[join-premap_common_multiparents_no_premap--Results] [GOOD] >> test.py::test[join-premap_common_right_tablecontent--Results] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Results] [GOOD] >> test.py::test[action-eval_if-default.txt-Results] >> test.py::test[lineage-flatten_by--Results] [GOOD] >> test.py::test[lineage-member_over_if_struct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-unordered_subquery-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-map_force--Results] [SKIPPED] >> test.py::test[multicluster-pull-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-direct_row_after_merge--Results] >> test.py::test[pg-tpcds-q49-default.txt-Results] [GOOD] >> test.py::test[join-pullup_left--ForceBlocks] [GOOD] >> test.py::test[join-pullup_left--Results] >> test.py::test[hor_join-merge_multiouts_reuse--Results] >> test.py::test[action-evaluate_match_type-default.txt-Results] [GOOD] >> test.py::test[action-evaluate_queries--Results] >> test.py::test[join-mergejoin_force_one_sorted--Results] [GOOD] >> test.py::test[hor_join-merge_multiouts_reuse--Results] [SKIPPED] >> test.py::test[hor_join-out_hor_join-default.txt-Results] >> test.py::test[join-mergejoin_left_null_column--Results] >> test.py::test[ql_filter-integer_optional_null--Results] [GOOD] >> test.py::test[ql_filter-integer_single_equals--ForceBlocks] >> test.py::test[schema-user_schema_existing_column--Results] [GOOD] >> test.py::test[schema-user_schema_mix3--Results] >> test.py::test[aggregate-group_by_hop_static-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_static-default.txt-Results] [SKIPPED] >> test.py::test[aggregate-group_by_mul_gs_ru--ForceBlocks] >> test.py::test[pg-select_from_columns_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q03-default.txt-Results] |84.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[pg-tpcds-q49-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt-Results] >> test.py::test[column_order-insert_tmp-default.txt-Results] [GOOD] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Results] [SKIPPED] >> test.py::test[column_order-insert_with_reorder_cols--Results] >> test.py::test[sampling-sample-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_filter-default.txt-Results] >> test.py::test[blocks-complex_scalars--ForceBlocks] [GOOD] >> test.py::test[blocks-complex_scalars--Results] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] [GOOD] >> test.py::test[optimizers-remove_keep_sorted_setting--Results] >> test.py::test[dq-precompute_parallel_indep--ForceBlocks] [GOOD] >> test.py::test[dq-precompute_parallel_indep--Results] [SKIPPED] >> test.py::test[flatten_by-flatten_few_fields--ForceBlocks] >> test.py::test[blocks-add_uint64_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_pg_filter--Results] >> 
test.py::test[flatten_by-flatten_with_join--Results] [GOOD] >> test.py::test[hor_join-out_max_outtables-default.txt-Results] >> test.py::test[column_order-winfunc-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_table_sort_asc--Results] >> test.py::test[action-eval_if-default.txt-Results] [GOOD] >> test.py::test[action-eval_sample--Results] >> test.py::test[join-pullup_left--Results] [GOOD] >> test.py::test[join-pullup_left-off-ForceBlocks] >> test.py::test[join-order_of_qualified--Results] [GOOD] >> test.py::test[join-order_of_qualified-off-Results] [SKIPPED] >> test.py::test[join-premap_common_inner_filter-off-Results] [SKIPPED] >> test.py::test[join-premap_map_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_map_semi--Results] >> test.py::test[blocks-combine_all_count_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_count_filter--Results] >> test.py::test[flatten_by-flatten_list_on_flatten_by--Results] [GOOD] >> test.py::test[flatten_by-flatten_two_fields--Results] >> test.py::test[produce-reduce_all_multi_in-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_lambda_presort_twin--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_difftype_assume--Results] [SKIPPED] >> test.py::test[pg-tpch-q08-default.txt-Results] >> test.py::test[join-join_left_cbo--Results] [GOOD] >> test.py::test[join-left_trivial--Results] >> test.py::test[action-evaluate_queries--Results] [GOOD] >> test.py::test[action-export_action--Results] >> test.py::test[lineage-select_table_row-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-extend-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-unordered_over_sort--Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_group_one_of_multi--Results] >> test.py::test[blocks-complex_scalars--Results] [GOOD] >> test.py::test[blocks-decimal_op_decimal_scalar--ForceBlocks] >> test.py::test[window-lagging/aggregations--Results] [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Results] >> test.py::test[optimizers-direct_row_after_merge--Results] [GOOD] >> test.py::test[optimizers-keep_sort_with_renames--Results] >> test.py::test[blocks-combine_all_count_filter--Results] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_nested--ForceBlocks] >> test.py::test[join-premap_common_inner--Results] [GOOD] >> test.py::test[join-premap_context_dep-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_extrasort1-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_extrasort2--Results] >> test.py::test[pg-tpcds-q03-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Results] >> test.py::test[aggregate-group_by_ru_join_simple--Results] [GOOD] >> test.py::test[aggregate-group_by_with_udf_by_aggregate--Results] [SKIPPED] >> test.py::test[aggregate-percentiles_ungrouped--Results] >> docker_wrapper_test.py::test_pg_generated[Test64BitErrorChecking] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestAppendEncodedText] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestAppendEscapedText] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestAppendEscapedTextExistingBuffer] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestArrayScanBackend] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestArrayScanner] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestArrayValueBackend] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestArrayValuer] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBadConn] [GOOD] >> 
test.py::test[ql_filter-integer_single_equals--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_single_equals--Results] >> test.py::test[schema-user_schema_mix3--Results] [GOOD] >> test.py::test[select-dict_lookup_column_names-default.txt-Results] >> docker_wrapper_test.py::test_pg_generated[TestBinaryByteSliceToInt] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestBinaryByteSlicetoUUID] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestBindError] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteSliceToText] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaOutputFormatEncoding] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaOutputFormats] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCloseBadConn] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCommit] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCommitInFailedTransaction] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCommitInFailedTransactionWithCancelContext] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnClose] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnExecDeadlock] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnListen] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestConnPing] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext/context.Background] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext/context.WithTimeout] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext/context.WithTimeout_exceeded] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnUnlisten] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestConnUnlistenAll] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestConnectorWithNoticeHandler_Simple] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestConnectorWithNotificationHandler_Simple] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestContextCancelBegin] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestContextCancelExec] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestContextCancelQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyFromError] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInBinaryError] [SKIPPED] >> 
docker_wrapper_test.py::test_pg_generated[TestCopyInMultipleValues] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInRaiseStmtTrigger] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInSchemaStmt] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCopyInStmt] [GOOD] >> test.py::test[sampling-subquery_filter-default.txt-Results] [GOOD] |84.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[produce-reduce_multi_in_difftype_assume--Results] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInStmtAffectedRows] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInTypes] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInWrongType] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyOutsideOfTxnError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCopyRespLoopConnectionError] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopySyntaxError] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestDataType] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDataTypeLength] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDataTypeName] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDataTypePrecisionScale] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDecodeBool] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDecodeUUIDBackend] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDecodeUUIDBinaryError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestEmptyQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestEmptyResultSetColumns] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestEncodeAndParseTs] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestEncodeDecode] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorClass] >> test.py::test[join-premap_common_right_tablecontent--Results] [GOOD] >> test.py::test[join-premap_common_right_tablecontent-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_inner--Results] >> docker_wrapper_test.py::test_pg_generated[TestErrorClass] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorDuringStartup] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorDuringStartupClosesConn] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestErrorOnExec] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorOnQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorOnQueryRowSimpleQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorSQLState] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestExec] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanError] [GOOD] >> 
docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayValue] >> test.py::test[window-distinct_over_window_full_frames--Results] [GOOD] >> test.py::test[window-full/leadlag_compact--Results] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFormatAndParseTimestamp] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFormatTs] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFormatTsBackend] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestFullParseURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanDelimiter] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanErrors] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerArrayBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerArrayString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayValueErrors] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayValueUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestHasCorrectRootGroupPermissions] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestIPv6LoopbackParseURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInfinityTimestamp] >> docker_wrapper_test.py::test_pg_generated[TestInfinityTimestamp] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInvalidProtocolParseURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestIsUTF8] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestIssue1046] >> docker_wrapper_test.py::test_pg_generated[TestIssue1046] [SKIPPED] >> 
docker_wrapper_test.py::test_pg_generated[TestIssue1062] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestIssue186] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestIssue196] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestIssue282] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestIssue494] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestIssue617] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestListenerClose] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestListenerConnCloseWhileQueryIsExecuting] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestListenerFailedQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestListenerListen] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestListenerPing] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestListenerReconnect] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestListenerUnlisten] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestListenerUnlistenAll] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestMinimalURL] [GOOD] >> test.py::test[tpch-q8-default.txt-Results] [GOOD] >> test.py::test[tpch-q9-default.txt-Results] >> docker_wrapper_test.py::test_pg_generated[TestMultipleEmptyResult] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestMultipleResult] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestMultipleSimpleQuery] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestNewConnector_Connect] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestNewConnector_Driver] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestNewConnector_WorksWithOpenDB] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestNewListenerConn] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestNoData] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestNotifyExtra] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestNullAfterNonNull] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestOpenURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParameterCountMismatch] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseArray] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseArrayError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseComplete] >> test.py::test[ql_filter-integer_single_equals--Results] [GOOD] >> test.py::test[result_types-data-default.txt-ForceBlocks] |84.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[sampling-subquery_filter-default.txt-Results] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseComplete] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseEnviron] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseErrorInExtendedQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestParseOpts] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseTs] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseTsErrors] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestPgpass] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestPing] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestQueryCancelRace] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestQueryCancelledReused] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestQueryRowBugWorkaround] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestQuickClose] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestQuoteIdentifier] [GOOD] 
>> docker_wrapper_test.py::test_pg_generated[TestQuoteLiteral] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestReadFloatPrecision] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestReconnect] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestReturning] [SKIPPED] >> test.py::test[aggregate-group_by_cube_grouping--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_distinct--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_distinct_compact--Results] [SKIPPED] >> test.py::test[column_order-insert_with_reorder_cols--Results] [GOOD] >> test.py::test[column_order-join_nosimple--Results] [SKIPPED] >> test.py::test[column_order-union_all-default.txt-Results] >> docker_wrapper_test.py::test_pg_generated[TestRowsCloseBeforeDone] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestRowsColumnTypes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestRowsResultTag] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestRuntimeParameters] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_not_passed_when_disabled] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_not_set_for_IPv4] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_passed_when_asked_for] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_set_by_default] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSSLClientCertificates] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLConnection] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLRequireWithRootCert] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLVerifyCA] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLVerifyFull] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestScanNilTimestamp] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestScanTimestamp] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSimpleParseURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSimpleQuery] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStatment] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext/context.Background] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext/context.WithTimeout] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext/context.WithTimeout_exceeded] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext/context.Background] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext/context.WithTimeout] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext/context.WithTimeout_exceeded] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanUnsupported] [GOOD] >> test.py::test[datetime-date_tz_table_sort_asc--Results] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] >> 
docker_wrapper_test.py::test_pg_generated[TestStringArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringToBytea] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringToUUID] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringWithNul] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestTextByteSliceToInt] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTextByteSliceToUUID] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTextDecodeIntoString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59+00:00_=>_0000-01-01T11:59:59Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59+04:00_=>_0000-01-01T11:59:59+04:00] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59+04:01:02_=>_0000-01-01T11:59:59+04:01] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59-04:01:02_=>_0000-01-01T11:59:59-04:01] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00+00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00-04:00_=>_0000-01-02T00:00:00-04:00] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00:00+00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00:00.0+00_=>_0000-01-02T00:00:00Z] [GOOD] >> test.py::test[join-mergejoin_left_null_column--Results] [GOOD] >> test.py::test[join-mergejoin_semi_composite_to_inner--Results] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00:00.000000+00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00Z_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/11:59:59_=>_0000-01-01T11:59:59Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00:00.000000_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00:00.0_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00:00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimestampWithOutTimezone] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimestampWithTimeZone] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestTxOptions] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestXactMultiStmt] [SKIPPED] >> test.py::test[action-eval_sample--Results] [GOOD] >> test.py::test[action-eval_values_output_table_subquery--Results] >> test.py::test[hor_join-out_max_outtables-default.txt-Results] [GOOD] >> test.py::test[in-huge_in-default.txt-Results] |84.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[aggregate-group_by_hop_distinct_compact--Results] [SKIPPED] >> test.py::test[pg-tpcds-q09-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q22-default.txt-Results] >> test.py::test[join-pullup_left-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_left-off-Results] [SKIPPED] >> test.py::test[join-pullup_renaming--ForceBlocks] >> 
test.py::test[flatten_by-flatten_few_fields--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_few_fields--Results] >> test.py::test[aggregate-group_by_mul_gs_ru--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru--Results] >> test.py::test[join-join_comp_common_table--Results] [GOOD] >> test.py::test[join-join_no_correlation_in_order_by-off-Results] >> test.py::test[tpch-q4-default.txt-Results] >> test_stop.py::TestStop::test_stop_query[v1-analytics] [GOOD] >> test.py::test[join-premap_map_semi--Results] [GOOD] >> test.py::test[join-premap_merge_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_with_remap--Results] >> test.py::test[join-join_no_correlation_in_order_by-off-Results] [SKIPPED] >> test.py::test[join-left_only_semi_and_other--Results] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Results] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt-Results] [GOOD] >> test.py::test[tpch-q10-default.txt-Results] >> test.py::test[blocks-decimal_op_decimal_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_op_decimal_scalar--Results] >> test.py::test[select-dict_lookup_column_names-default.txt-Results] [GOOD] >> test.py::test[select-host_count--Results] >> test.py::test[action-export_action--Results] [GOOD] >> test.py::test[action-unwrap_runtime_fail_with_column_message--Results] >> test.py::test[blocks-combine_hashed_minmax_nested--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_nested--Results] >> test.py::test[optimizers-remove_keep_sorted_setting--Results] [GOOD] >> test.py::test[optimizers-sorted_scalar_content--Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_window_no_payloads--Results] >> test.py::test[join-left_trivial--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--Results] >> test.py::test[optimizers-unused_columns_group_one_of_multi--Results] [GOOD] >> test.py::test[optimizers-yql-12620_stage_multiuse--Results] >> test.py::test[column_order-union_all-default.txt-Results] [GOOD] >> test.py::test[column_order-union_all_positional_columns_count_fail--Results] >> test.py::test[join-premap_merge_extrasort2--Results] [GOOD] >> test.py::test[join-premap_nonseq_flatmap-off-Results] [SKIPPED] >> test.py::test[join-pullup_random--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/py3test >> docker_wrapper_test.py::test_pg_generated[TestXactMultiStmt] [SKIPPED] Test command err: ydb/tests/postgres_integrations/library/pytest_integration.py:92: ResourceWarning: unclosed image = _docker_build(_tests_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:92: ResourceWarning: unclosed image = _docker_build(_tests_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:92: ResourceWarning: unclosed image = _docker_build(_tests_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed 
_run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[hor_join-out_hor_join-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_two_fields--Results] [GOOD] >> test.py::test[blocks-decimal_op_decimal_scalar--Results] [GOOD] >> test.py::test[blocks-interval_add_date_scalar--ForceBlocks] >> test.py::test[aggregate-percentiles_ungrouped--Results] [GOOD] >> test.py::test[aggregate-rollup_with_dict--Results] >> test.py::test[hor_join-skip_yamr--Results] >> test.py::test[flatten_by-flatten_few_fields--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr--ForceBlocks] >> test.py::test[join-premap_merge_inner--Results] [GOOD] >> test.py::test[join-right_trivial--Results] >> test.py::test[action-eval_values_output_table_subquery--Results] [GOOD] >> test.py::test[action-subquery_merge1-default.txt-Results] >> test.py::test[pg-tpcds-q22-default.txt-Results] [GOOD] >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] >> test.py::test[optimizers-keep_sort_with_renames--Results] [GOOD] >> test.py::test[optimizers-reduce_with_aux_sort_column--Results] >> test.py::test[blocks-combine_all_pg_filter--Results] [GOOD] >> test.py::test[blocks-date_top_sort--Results] >> test.py::test[action-unwrap_runtime_fail_with_column_message--Results] [GOOD] >> test.py::test[aggr_factory-list--Results] >> test.py::test[pg-tpch-q08-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-Results] |84.2%| [TA] $(B)/ydb/tests/postgres_integrations/go-libpq/test-results/py3test/{meta.json ... results_accumulator.log} |84.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[pg-tpcds-q22-default.txt-Results] [GOOD] |84.2%| [TA] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test.py::test[blocks-combine_hashed_minmax_nested--Results] [GOOD] >> test.py::test[blocks-date_sub_interval--ForceBlocks] >> test.py::test[column_order-union_all_positional_columns_count_fail--Results] [GOOD] >> test.py::test[csee-yql-7237--Results] |84.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[flatten_by-flatten_two_fields--Results] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru--Results] [GOOD] >> test.py::test[aggregate-percentiles_ungrouped--ForceBlocks] >> test.py::test[tpch-q9-default.txt-Results] [GOOD] >> test.py::test[type_v3-decimal_yt--Results] >> test.py::test[aggregate-group_by_ru_with_select_distinct--Results] >> test.py::test[join-pullup_renaming--ForceBlocks] [GOOD] >> test.py::test[join-pullup_renaming--Results] >> test.py::test[optimizers-unused_columns_window_no_payloads--Results] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Results] |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] >> test.py::test[result_types-data-default.txt-ForceBlocks] [GOOD] >> test.py::test[result_types-data-default.txt-Results] >> test.py::test[in-huge_in-default.txt-Results] [GOOD] >> test.py::test[insert-append_after_replace-default.txt-Results] >> test.py::test[schema-limit_directread--Results] >> test.py::test[hor_join-skip_yamr--Results] [GOOD] >> test.py::test[hor_join-table_record--Results] >> test.py::test[tpch-q4-default.txt-Results] [GOOD] >> test.py::test[tpch-q7-default.txt-Results] >> test.py::test[optimizers-yql-12620_stage_multiuse--Results] [GOOD] >> test.py::test[action-subquery_merge1-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge2-default.txt-Results] >> test.py::test[optimizers-yql-5978_fill_multi_usage--Results] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] [GOOD] >> test.py::test[epochs-use_sorted_by_complex_type--Results] >> test.py::test[join-left_trivial-off-ForceBlocks] >> test.py::test[window-full/leadlag_compact--Results] [GOOD] >> test.py::test[window-full/noncompact_with_nulls_tuple_key--Results] >> test.py::test[join-premap_merge_with_remap--Results] [GOOD] >> test.py::test[join-pullup_cross-off-Results] [SKIPPED] >> test.py::test[join-pullup_exclusion--Results] >> test.py::test[window-win_by_all_avg_interval-default.txt-Results] [GOOD] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Results] >> test.py::test[join-pullup_renaming--Results] [GOOD] >> test.py::test[join-pullup_renaming-off-ForceBlocks] >> test.py::test[tpch-q10-default.txt-Results] [GOOD] >> test.py::test[tpch-q11-default.txt-Results] >> test.py::test[optimizers-reduce_with_aux_sort_column--Results] [GOOD] >> test.py::test[optimizers-simplified_path_constraint--Results] [SKIPPED] >> test.py::test[optimizers-yql-17413-topsort--Results] >> test.py::test[result_types-data-default.txt-Results] [GOOD] >> test.py::test[sampling-reduce_with_presort--ForceBlocks] [SKIPPED] >> test.py::test[sampling-reduce_with_presort--Results] >> test.py::test[select-host_count--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr--Results] >> test.py::test[type_v3-decimal_yt--Results] [GOOD] >> test.py::test[type_v3-mixed_with_columns--Results] >> 
test.py::test[sampling-reduce_with_presort--Results] [SKIPPED] >> test.py::test[join-right_trivial--Results] [GOOD] >> test.py::test[join-three_equalities-off-Results] [SKIPPED] >> test.py::test[json-jsondocument/insert--Results] >> test.py::test[join-pullup_random--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_left--Results] >> test.py::test[join-mergejoin_semi_composite_to_inner--Results] [GOOD] >> test.py::test[join-premap_common_left_cross--Results] >> test.py::test[aggregate-percentiles_ungrouped--ForceBlocks] [GOOD] >> test.py::test[aggregate-rollup_with_dict--Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt-Results] >> test.py::test[schema-limit_directread--Results] [GOOD] >> test.py::test[schema-select_all-row_spec-Results] |84.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[sampling-reduce_with_presort--Results] [SKIPPED] |84.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[select-host_count--Results] [GOOD] >> test.py::test[blocks-interval_add_date_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_add_date_scalar--Results] >> test.py::test[schema-select_all_inferschema_limit--ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--Results] |84.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[aggregate-percentiles_ungrouped--ForceBlocks] [GOOD] >> test.py::test[blocks-date_sub_interval--ForceBlocks] [GOOD] >> test.py::test[blocks-date_sub_interval--Results] >> test.py::test[insert-append_after_replace-default.txt-Results] [GOOD] >> test.py::test[insert-append_sorted--Results] >> test.py::test[action-subquery_merge2-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-hll-default.txt-Results] >> test.py::test[blocks-date_top_sort--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_keys--Results] >> test.py::test[join-left_only_semi_and_other--Results] [GOOD] >> test.py::test[hor_join-table_record--Results] [GOOD] >> test.py::test[hor_join-yql-6477_table_path-default.txt-Results] >> test.py::test[blocks-string_with--ForceBlocks] >> test.py::test[join-left_trivial-off-ForceBlocks] [GOOD] >> test.py::test[join-left_trivial-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many-off-ForceBlocks] >> test.py::test[join-filter_joined-off-Results] [SKIPPED] >> test.py::test[join-flatten_columns1--Results] >> test.py::test[flatten_by-flatten_with_group_by_expr--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_join--ForceBlocks] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill--Results] [SKIPPED] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Results] |84.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[join-left_only_semi_and_other--Results] [GOOD] >> test.py::test[json-jsondocument/insert--Results] [GOOD] >> test.py::test[type_v3-mixed_with_columns--Results] [GOOD] >> test.py::test[type_v3-split--Results] [SKIPPED] >> test.py::test[type_v3-uuid--Results] >> test.py::test[blocks-interval_add_date_scalar--Results] [GOOD] >> 
test.py::test[blocks-interval_sub_interval--ForceBlocks] >> test.py::test[aggr_factory-list--Results] [GOOD] >> test.py::test[aggr_factory-log_histogram-default.txt-Results] >> test.py::test[schema-select_all-row_spec-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort-Results] >> test.py::test[hor_join-merge_multiouts_part--ForceBlocks] >> test_row_dispatcher.py::TestPqRowDispatcher::test_scheme_error [GOOD] >> test.py::test[hor_join-merge_multiouts_part--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_part--Results] [SKIPPED] >> test.py::test[hor_join-sorted_out_mix--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-sorted_out_mix--Results] [SKIPPED] >> test.py::test[hor_join-yield_on-default.txt-ForceBlocks] >> test.py::test[tpch-q7-default.txt-Results] [GOOD] >> test.py::test[type_v3-type_subset--Results] [SKIPPED] >> test.py::test[udf-named_args_for_script_with_posargs2--Results] >> test.py::test[epochs-use_sorted_by_complex_type--Results] [GOOD] >> test.py::test[expr-langver--Results] >> test.py::test[csee-yql-7237--Results] [GOOD] >> test.py::test[distinct-distinct_window-default.txt-Results] |84.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[json-jsondocument/insert--Results] [GOOD] >> test.py::test[blocks-date_sub_interval--Results] [GOOD] >> test.py::test[blocks-date_sub_interval_scalar--ForceBlocks] >> test.py::test[join-pullup_renaming-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_renaming-off-Results] [SKIPPED] >> test.py::test[join-right_trivial--ForceBlocks] >> test.py::test[aggregate-group_by_ru_with_select_distinct--Results] [GOOD] >> test.py::test[aggregate-group_by_tablerow_column--Results] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Results] [GOOD] >> test.py::test[window-yql-15636-default.txt-Results] >> test.py::test[insert-append_sorted--Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_desc-Results] >> test.py::test[join-pullup_exclusion--Results] [GOOD] >> test.py::test[join-three_equalities--Results] >> test.py::test[schema-select_all_inferschema_limit--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema_limit--Results] >> test.py::test[optimizers-yql-5978_fill_multi_usage--Results] [GOOD] >> test.py::test[optimizers-yt_shuffle_by_keys--Results] [SKIPPED] >> test.py::test[order_by-assume_with_filter--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_stop.py::TestStop::test_stop_query[v1-analytics] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=374570) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[hor_join-yql-6477_table_path-default.txt-Results] [GOOD] >> test.py::test[in-in_enum_single0-default.txt-Results] >> test.py::test[optimizers-yql-17413-topsort--Results] [GOOD] >> test.py::test[optimizers-yql-3455_filter_sorted--Results] >> test.py::test[join-pushdown_filter_over_left--Results] [GOOD] >> test.py::test[join-right_trivial-off-Results] [SKIPPED] >> test.py::test[join-star_join--Results] >> test.py::test[schema-select_all_inferschema_limit--Results] [GOOD] >> test.py::test[blocks-string_with--ForceBlocks] [GOOD] >> test.py::test[blocks-string_with--Results] >> test.py::test[type_v3-uuid--Results] [GOOD] >> test.py::test[udf-udf--Results] >> test.py::test[schema-select_field-schema-ForceBlocks] >> test.py::test[schema-select_all-row_spec_hide_sort-Results] [GOOD] >> test.py::test[schema-select_all-yamred_dsv-Results] >> test.py::test[window-full/noncompact_with_nulls_tuple_key--Results] [GOOD] >> test.py::test[window-full/noncompact_with_tablerow--Results] >> test.py::test[tpch-q11-default.txt-Results] [GOOD] >> test.py::test[type_v3-append_diff_flags--Results] >> test.py::test[select-append_to_value--Results] >> test.py::test[expr-langver--Results] [GOOD] >> test.py::test[expr-non_persistable_insert_into_fail--Results] >> test.py::test[lineage-select_group_by_all-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_group_by_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-unordered_subquery-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-unordered_subquery-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_one-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_one-default.txt-Results] [SKIPPED] >> test.py::test[lineage-with_inline-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-with_inline-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-keep_sort_with_renames--ForceBlocks] >> test.py::test[udf-named_args_for_script_with_posargs2--Results] [GOOD] >> test.py::test[udf-udaf_distinct--Results] >> test.py::test[flatten_by-flatten_with_join--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_join--Results] >> test.py::test[blocks-interval_sub_interval--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_sub_interval--Results] >> test.py::test[order_by-order_by_mul_columns-default.txt-Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_with_anonymous--ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--Results] [GOOD] >> test.py::test[join-mapjoin_on_tablerecord-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_partial_uniq_keys--Results] >> test.py::test[hor_join-yield_on-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-yield_on-default.txt-Results] >> test.py::test[join-premap_common_left_cross--Results] [GOOD] >> test.py::test[join-pullup_null_column-off-Results] [SKIPPED] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-Results] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted-off-Results] [SKIPPED] >> 
test.py::test[join-star_join-off-Results] [SKIPPED] >> test.py::test[join-star_join_inners_vk_sorted-off-Results] >> test.py::test[blocks-string_with--Results] [GOOD] >> test.py::test[blocks-struct_type--ForceBlocks] >> test.py::test[join-right_trivial--ForceBlocks] [GOOD] >> test.py::test[join-right_trivial--Results] >> test.py::test[join-star_join_inners_vk_sorted-off-Results] [SKIPPED] >> test.py::test[join-strict_keys--Results] >> test.py::test[join-flatten_columns1--Results] [GOOD] >> test.py::test[join-full_trivial_udf_call-off-Results] [SKIPPED] >> test.py::test[join-inner_on_key_only--Results] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Results] [GOOD] >> test.py::test[optimizers-yql-6038_direct_row--Results] >> test.py::test[order_by-assume_with_filter--Results] [GOOD] >> test.py::test[order_by-native_desc_sort-over_sorted-Results] [SKIPPED] >> test.py::test[expr-non_persistable_insert_into_fail--Results] [GOOD] >> test.py::test[aggr_factory-log_histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Results] >> test.py::test[order_by-order_by_tablerecord_column--Results] >> test.py::test[hor_join-filters--Results] >> test.py::test[aggr_factory-hll-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-sum_if-default.txt-Results] >> test.py::test[pg-tpcds-q16-default.txt-Results] >> test.py::test[aggregate-group_by_tablerow_column--Results] [GOOD] >> test.py::test[bigdate-table_yt_native-wo_compat-Results] [SKIPPED] >> test.py::test[bigdate-tz_table_fill--Results] >> test.py::test[insert-append_sorted-to_sorted_desc-Results] [GOOD] >> test.py::test[insert-append_with_read_udf_fail--Results] >> test.py::test[schema-select_all-yamred_dsv-Results] [GOOD] >> test.py::test[schema-select_all_inferschema-extra_field-Results] >> test.py::test[in-in_enum_single0-default.txt-Results] [GOOD] >> test.py::test[insert-after_group_by-default.txt-Results] >> test.py::test[hor_join-yield_on-default.txt-Results] [GOOD] >> test.py::test[in-in_ansi_join--ForceBlocks] >> test.py::test[type_v3-append_diff_flags--Results] [GOOD] >> test.py::test[type_v3-singulars--Results] [SKIPPED] >> test.py::test[udf-named_args_for_script--Results] >> test.py::test[blocks-interval_sub_interval--Results] [GOOD] >> test.py::test[coalesce-coalesce_sugar-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q21-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_python_stream-empty-Results] >> test.py::test[join-strict_keys--Results] [GOOD] >> test.py::test[join-yql-14847-off-Results] [SKIPPED] >> test.py::test[join-yql-8980--Results] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnMultiShardTable [GOOD] >> test.py::test[udf-udf--Results] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Results] >> test.py::test[schema-select_field-schema-ForceBlocks] [GOOD] >> test.py::test[schema-select_field-schema-Results] >> test.py::test[select-append_to_value--Results] [GOOD] >> test.py::test[select-boolean_where--Results] >> test.py::test[join-premap_nonseq_flatmap--Results] >> test.py::test[join-right_trivial--Results] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted--ForceBlocks] >> test.py::test[flatten_by-flatten_with_join--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_resource--ForceBlocks] >> test.py::test[blocks-date_sub_interval_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_sub_interval_scalar--Results] >> test.py::test[insert-append_with_read_udf_fail--Results] [GOOD] >> 
test.py::test[insert-keepmeta_nonstrict_fail--Results] >> test.py::test[order_by-order_by_mul_columns-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_udf--Results] >> test.py::test[optimizers-yql-3455_filter_sorted--Results] [GOOD] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Results] >> test.py::test[join-three_equalities--Results] [GOOD] >> test.py::test[join-trivial_view--Results] >> test.py::test[distinct-distinct_window-default.txt-Results] [GOOD] >> test.py::test[epochs-write_and_use_in_same_epoch--Results] >> test.py::test[schema-select_field-schema-Results] [GOOD] >> test.py::test[schema-user_schema_mix2--ForceBlocks] >> test.py::test[join-mapjoin_partial_uniq_keys--Results] [GOOD] >> test.py::test[join-mergejoin_force_align3--Results] [SKIPPED] >> test.py::test[join-mapjoin_with_anonymous--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_with_anonymous--Results] >> test.py::test[blocks-struct_type--ForceBlocks] [GOOD] >> test.py::test[blocks-struct_type--Results] >> test.py::test[action-eval_if-default.txt-ForceBlocks] >> test.py::test[optimizers-keep_sort_with_renames--ForceBlocks] [GOOD] >> test.py::test[optimizers-keep_sort_with_renames--Results] >> test.py::test[pg-tpcds-q16-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-ForceBlocks] |84.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[join-mergejoin_force_align3--Results] [SKIPPED] >> test.py::test[udf-udaf_distinct--Results] [GOOD] >> test.py::test[weak_field-hor_join_with_mix_weak_access--Results] >> test.py::test[produce-process_with_python_stream-empty-Results] [GOOD] >> test.py::test[produce-reduce_all_field_subset--Results] >> test.py::test[hor_join-filters--Results] [GOOD] >> test.py::test[hor_join-out_sampling--Results] >> test.py::test[join-inner_on_key_only--Results] [GOOD] >> test.py::test[join-join_no_correlation_in_order_by--Results] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Results] >> test.py::test[insert-keepmeta_nonstrict_fail--Results] [GOOD] >> test.py::test[insert-multiappend_sorted-default.txt-Results] >> test.py::test[coalesce-coalesce_sugar-default.txt-ForceBlocks] [GOOD] >> test.py::test[coalesce-coalesce_sugar-default.txt-Results] >> test.py::test[blocks-date_sub_interval_scalar--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_keys--Results] [GOOD] >> test.py::test[blocks-filter_partial_expr--Results] >> test.py::test[schema-select_all_inferschema-extra_field-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_limit--Results] >> test.py::test[order_by-order_by_tablerecord_column--Results] [GOOD] >> test.py::test[pg-aggregate_minus_zero--Results] >> test.py::test[udf-named_args_for_script--Results] [GOOD] >> test.py::test[udf-regexp_udf--Results] >> test.py::test[bigdate-tz_table_fill--Results] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Results] >> test.py::test[blocks-decimal_unary--ForceBlocks] >> test.py::test[join-mapjoin_with_anonymous--Results] [GOOD] >> test.py::test[join-mapjoin_with_empty_read-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_read-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_one_sorted--ForceBlocks] >> test.py::test[window-full/noncompact_with_tablerow--Results] [GOOD] >> test.py::test[window-generic/session--Results] >> 
test.py::test[blocks-struct_type--Results] [GOOD] >> test.py::test[column_group-hint_diff_grp_fail2--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail2--Results] >> test.py::test[window-yql-15636-default.txt-Results] [GOOD] >> test.py::test[column_group-hint_diff_grp_fail2--Results] [SKIPPED] >> test.py::test[column_group-insert_diff_groups2_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-insert_diff_groups2_fail--Results] [SKIPPED] >> test.py::test[column_group-min_group-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[column_group-min_group-default.txt-Results] [SKIPPED] >> test.py::test[column_order-insert_with_reorder_cols--ForceBlocks] >> test.py::test[aggr_factory-sum_if-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_filter_pushdown--Results] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_native-on-Results] [SKIPPED] >> test.py::test[select-boolean_where--Results] [GOOD] >> test.py::test[select-deep_udf_call--Results] >> test.py::test[binding-table_concat_strict_binding-default.txt-Results] >> test.py::test[join-star_join--Results] [GOOD] >> test.py::test[join-star_join_semionly-off-Results] [SKIPPED] >> test.py::test[join-three_equalities_paren-off-Results] [SKIPPED] >> test.py::test[join-yql-14829_left--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnMultiShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.251005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.251028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.251034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:20.251039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.251044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.251048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.251058Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.251073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.251167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.251241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.273238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.273258Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.273345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.274781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.274812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.274835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.275424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.275457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.275552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.275586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.275905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.276087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.276095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.276141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.276147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.276153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.276174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] 
sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.277346Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.307928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.307996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.308051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.308108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.308118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.308699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.308719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.308764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.308773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.308778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.308783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.309106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.309114Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.309118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.309374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.309381Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.309385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.309391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.310029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 
MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.310349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.310379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.310544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.310585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.310592Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.310645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.310652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.310679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.310689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.311099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.311105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.311133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.311138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.311147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.311152Z node 1 :FLAT_TX_SCHEMESHARD I ... 
Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-05T03:07:26.774251Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:07:26.774421Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:07:26.774429Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-05T03:07:26.774438Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:07:26.774490Z node 404 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:07:26.774514Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T03:07:26.774533Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-05-05T03:07:26.774557Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:07:26.774561Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:07:26.774567Z node 404 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2025-05-05T03:07:26.774618Z node 404 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:07:26.774635Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 122 RawX2: 1735166789732 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:07:26.774641Z node 404 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2025-05-05T03:07:26.774658Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T03:07:26.774667Z node 404 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:07:26.774671Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:07:26.774676Z node 404 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:07:26.774679Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:07:26.774687Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:07:26.774696Z node 404 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:07:26.774701Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T03:07:26.774706Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:07:26.774710Z node 404 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T03:07:26.774714Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T03:07:26.774722Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:07:26.774727Z node 404 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T03:07:26.774731Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-05-05T03:07:26.774735Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T03:07:26.775306Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:07:26.775332Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-05-05T03:07:26.775339Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-05T03:07:26.775433Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-05-05T03:07:26.775442Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-05T03:07:26.775838Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:07:26.775864Z node 404 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T03:07:26.775915Z node 404 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:07:26.775921Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:07:26.775951Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T03:07:26.775976Z node 404 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:07:26.775981Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [404:208:2210], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T03:07:26.775986Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [404:208:2210], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 2025-05-05T03:07:26.776118Z node 404 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:07:26.776133Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:07:26.776138Z node 404 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:07:26.776143Z node 404 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-05T03:07:26.776147Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:07:26.776250Z node 404 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:07:26.776263Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:07:26.776267Z node 404 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:07:26.776271Z node 404 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T03:07:26.776278Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:07:26.776290Z node 404 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T03:07:26.776296Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [404:130:2154] 2025-05-05T03:07:26.776362Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:07:26.776368Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T03:07:26.776378Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:07:26.776841Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:07:26.777163Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:07:26.777190Z node 404 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T03:07:26.777201Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T03:07:26.777209Z node 404 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T03:07:26.777213Z node 404 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T03:07:26.777218Z node 404 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1003, itemIdx# 4294967295 2025-05-05T03:07:26.777273Z node 404 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:07:26.777612Z node 404 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-05T03:07:26.777671Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-05T03:07:26.777678Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-05T03:07:26.777759Z node 404 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-05T03:07:26.777777Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-05T03:07:26.777782Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [404:871:2805] TestWaitNotification: OK eventTxId 1003 >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_phases_table1-default.txt-Results] >> test.py::test[coalesce-coalesce_sugar-default.txt-Results] [GOOD] >> test.py::test[column_group-hint_anon-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_anon-perusage-Results] [SKIPPED] >> test.py::test[flatten_by-flatten_with_resource--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_resource--Results] >> test.py::test[order_by-order_by_udf--Results] [GOOD] >> test.py::test[join-premap_nonseq_flatmap--Results] [GOOD] >> test.py::test[join-prune_keys--ForceBlocks] >> test.py::test[order_by-order_by_value_desc-default.txt-Results] >> test.py::test[optimizers-keep_sort_with_renames--Results] [GOOD] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-ForceBlocks] >> test.py::test[join-star_join_inners_vk_sorted--ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted--Results] >> test.py::test[insert-after_group_by-default.txt-Results] [GOOD] >> test.py::test[insert-append--Results] ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[window-yql-15636-default.txt-Results] [GOOD] Test command err: 127.0.0.1 - - [05/May/2025 03:06:22] "GET /mylib.sql HTTP/1.1" 200 - |84.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[column_group-hint_anon-perusage-Results] [SKIPPED] >> test.py::test[pg-tpcds-q30-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Results] >> test.py::test[schema-user_schema_mix2--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_mix2--Results] >> test.py::test[flatten_by-flatten_with_resource--Results] [GOOD] >> test.py::test[hor_join-out_hor_join-default.txt-ForceBlocks] >> test.py::test[action-eval_if-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_if-default.txt-Results] >> test.py::test[optimizers-yql-6038_direct_row--Results] [GOOD] >> test.py::test[optimizers-yql-9297_publish_ytcopy--Results] >> 
test.py::test[weak_field-hor_join_with_mix_weak_access--Results] [GOOD] >> test.py::test[window-generic/aggregations_include_current--Results] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_avg_filter_opt--Results] >> test.py::test[join-trivial_view--Results] [GOOD] >> test.py::test[join-yql-12022-off-Results] [SKIPPED] >> test.py::test[json-json_value/example--Results] >> test.py::test[epochs-write_and_use_in_same_epoch--Results] [GOOD] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Results] >> test.py::test[udf-regexp_udf--Results] [GOOD] >> test.py::test[union_all-mix_map_and_project--Results] >> test.py::test[binding-table_concat_strict_binding-default.txt-Results] [GOOD] >> test.py::test[binding-table_regexp_binding--Results] >> test.py::test[pg-tpcds-q30-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-ForceBlocks] >> test.py::test[schema-user_schema_mix2--Results] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--ForceBlocks] >> test.py::test[blocks-filter_partial_expr--Results] [GOOD] >> test.py::test[blocks-pg--Results] >> test.py::test[schema-select_all_inferschema_limit--Results] [GOOD] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-Results] >> test.py::test[blocks-decimal_unary--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_unary--Results] >> test.py::test[action-eval_if-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-ForceBlocks] >> test.py::test[join-join_no_correlation_in_order_by--Results] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by-off-Results] [SKIPPED] >> test.py::test[join-join_without_column--Results] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_in_group_by--Results] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Results] [GOOD] >> test.py::test[order_by-assume_cut_prefix--Results] >> test.py::test[produce-reduce_all_field_subset--Results] [GOOD] >> test.py::test[produce-reduce_lambda_presort_twin_list--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_filter_and_having--Results] >> test.py::test[select-deep_udf_call--Results] [GOOD] >> test.py::test[select-dict_lookup-default.txt-Results] >> test.py::test[produce-reduce_with_python_filter_and_having--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_with_trivial_remaps--Results] [SKIPPED] >> test.py::test[result_types-pg-default.txt-Results] >> test.py::test[insert-multiappend_sorted-default.txt-Results] [GOOD] >> test.py::test[insert-unique_distinct_hints--Results] >> test.py::test[join-mergejoin_force_one_sorted--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted--Results] >> test.py::test[join-yql-8980--Results] [GOOD] >> test.py::test[join-yql_465--Results] >> test.py::test[column_order-insert_with_reorder_cols--ForceBlocks] [GOOD] >> test.py::test[column_order-insert_with_reorder_cols--Results] >> test.py::test[order_by-order_by_value_desc-default.txt-Results] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted--Results] [GOOD] >> test.py::test[join-yql-14829_leftonly-off-ForceBlocks] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Results] [GOOD] >> test.py::test[expr-tagged_runtime-default.txt-Results] >> 
test.py::test[pg-aggregate_combine--Results] >> test.py::test[pg-aggregate_minus_zero--Results] [GOOD] >> test.py::test[pg-join_using_tables3-default.txt-Results] >> test.py::test[blocks-decimal_unary--Results] [GOOD] >> test.py::test[blocks-interval_add_interval--ForceBlocks] >> test.py::test[in-in_compact_distinct-empty-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_scheme_error [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=366564) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[insert-append--Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-Results] >> test.py::test[aggregate-agg_phases_table1-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-Results] >> test.py::test[join-prune_keys--ForceBlocks] [GOOD] >> test.py::test[join-prune_keys--Results] >> test.py::test[join-mergejoin_force_one_sorted--Results] [GOOD] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-Results] >> test.py::test[aggregate-agg_filter_pushdown--Results] [GOOD] >> test.py::test[join-mergejoin_sorts_output_for_sort_inner--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_inner--Results] [SKIPPED] >> test.py::test[join-mergejoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_reverse_key_order--ForceBlocks] >> test.py::test[aggregate-agg_phases_table2-default.txt-Results] >> test.py::test[binding-table_regexp_binding--Results] [GOOD] >> test.py::test[blocks-block_input--Results] [SKIPPED] >> test.py::test[blocks-combine_all_pg--Results] >> test.py::test[column_order-insert_with_reorder_cols--Results] [GOOD] >> test.py::test[column_order-join--ForceBlocks] [SKIPPED] >> test.py::test[column_order-join--Results] [SKIPPED] >> test.py::test[distinct-distinct_groupby-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q41-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-Results] >> test.py::test[action-action_eval_cluster_use--Results] >> test.py::test[schema-yamred_dsv_select_from_dict--ForceBlocks] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--Results] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-Results] [GOOD] >> test.py::test[schema-user_schema_no_infer--Results] >> test.py::test[result_types-pg-default.txt-Results] [GOOD] >> test.py::test[sampling-mapjoin_left_sample-default.txt-Results] >> test.py::test[union_all-mix_map_and_project--Results] [GOOD] >> test.py::test[union_all-union_all_multiple-default.txt-Results] >> test.py::test[blocks-pg--Results] [GOOD] >> test.py::test[order_by-assume_cut_prefix--Results] [GOOD] >> test.py::test[order_by-literal_empty_list_sort--Results] >> test.py::test[blocks-combine_all_avg_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_count--Results] >> test.py::test[optimizers-yql-9297_publish_ytcopy--Results] 
[GOOD] >> test.py::test[order_by-extract_members_over_sort_desc--Results] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Results] >> test.py::test[hor_join-out_hor_join-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-out_hor_join-default.txt-Results] >> test.py::test[union_all-path_and_record-default.txt-Results] >> test.py::test[hor_join-out_sampling--Results] [GOOD] >> test.py::test[in-in_immediate_subquery-default.txt-Results] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--ForceBlocks] >> test.py::test[schema-yamred_dsv_select_from_dict--Results] [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt-ForceBlocks] >> test.py::test[join-join_without_column--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access--Results] >> test.py::test[select-dict_lookup-default.txt-Results] [GOOD] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Results] >> test.py::test[join-yql_465--Results] [GOOD] >> test.py::test[key_filter-decimal--Results] >> test.py::test[weak_field-weak_field_in_group_by--Results] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Results] >> test.py::test[pg-tpcds-q41-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q78-default.txt-ForceBlocks] |84.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[blocks-pg--Results] [GOOD] >> test.py::test[json-json_value/example--Results] [GOOD] >> test.py::test[key_filter-convert--Results] >> test.py::test[window-generic/session--Results] [GOOD] >> test.py::test[window-leading/aggregations--Results] >> test.py::test[join-prune_keys--Results] [GOOD] >> test.py::test[join-pullup_random--ForceBlocks] >> test.py::test[in-in_compact_distinct-empty-Results] [GOOD] >> test.py::test[in-in_noansi_join--Results] >> test.py::test[expr-tagged_runtime-default.txt-Results] [GOOD] >> test.py::test[file-file_constness--Results] >> test.py::test[aggr_factory-corellation-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-Results] >> test.py::test[insert-append_sorted-to_sorted_calc-Results] [GOOD] >> test.py::test[insert-from_erasure_to_none--Results] >> test.py::test[blocks-interval_add_interval--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_add_interval--Results] >> test.py::test[join-yql-14829_leftonly-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-14829_leftonly-off-Results] [SKIPPED] >> test.py::test[join-yql-16011--ForceBlocks] [SKIPPED] >> test.py::test[join-yql-16011--Results] [SKIPPED] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_key_column-default.txt-Results] >> test.py::test[pg-aggregate_combine--Results] [GOOD] >> test.py::test[pg-join_using_multiple2--Results] >> test.py::test[in-in_ansi_join--ForceBlocks] [GOOD] >> test.py::test[in-in_ansi_join--Results] >> test.py::test[join-yql-14829_left--Results] [GOOD] >> test.py::test[join-yql-4275--Results] >> test.py::test[schema-user_schema_no_infer--Results] [GOOD] >> test.py::test[schema-user_schema_patch_columns--Results] >> test.py::test[insert-unique_distinct_hints--Results] [GOOD] >> test.py::test[insert-values_subquery--Results] [SKIPPED] >> test.py::test[insert_monotonic-keep_unique--Results] [SKIPPED] >> test.py::test[insert_monotonic-to_empty--Results] |84.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} 
ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[join-yql-16011--Results] [SKIPPED] >> test.py::test[action-action_eval_cluster_use--Results] [GOOD] >> test.py::test[action-eval_atom_wrong_type_param--Results] [SKIPPED] >> test.py::test[action-eval_on_modif_table_fail--Results] >> test.py::test[order_by-literal_empty_list_sort--Results] [GOOD] >> test.py::test[order_by-literal_single_item_sort--Results] >> test.py::test[distinct-distinct_groupby-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_groupby-default.txt-Results] >> test.py::test[join-mergejoin_with_reverse_key_order--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order--Results] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Results] [GOOD] >> test.py::test[window-current/ansi_current_mixed--Results] >> test.py::test[produce-process_with_udf_rows-default.txt-Results] >> test.py::test[select-backtick_with_escapes-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_count--Results] [GOOD] >> test.py::test[blocks-date_greater_or_equal--Results] >> test.py::test[union_all-union_all_multiple-default.txt-Results] [GOOD] >> test.py::test[blocks-interval_add_interval--Results] [GOOD] >> test.py::test[blocks-interval_mul--ForceBlocks] >> test.py::test[aggr_factory-corellation-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-histogram-default.txt-ForceBlocks] >> test.py::test[select-backtick_with_escapes-default.txt-Results] >> test.py::test[hor_join-out_hor_join-default.txt-Results] [GOOD] >> test.py::test[in-in_sorted_by_tuple--ForceBlocks] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Results] [GOOD] >> test.py::test[select-exists_with_table-default.txt-Results] >> test.py::test[view-file_eval--Results] >> test.py::test[union_all-path_and_record-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_multiin--Results] >> test.py::test[pg-tpcds-q78-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q78-default.txt-Results] >> test.py::test[action-eval_on_modif_table_fail--Results] [GOOD] >> test.py::test[action-eval_range--Results] >> test.py::test[order_by-extract_members_over_sort_desc--Results] [GOOD] >> test.py::test[insert-from_erasure_to_none--Results] [GOOD] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-Results] >> test.py::test[sampling-mapjoin_left_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Results] >> test.py::test[key_filter-convert--Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] >> test.py::test[pg-join_using_tables3-default.txt-Results] [GOOD] >> test.py::test[pg-select_alias_partial-default.txt-Results] >> test.py::test[aggregate-agg_phases_table2-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt-Results] >> test.py::test[window-generic/aggregations_include_current--Results] [GOOD] >> test.py::test[window-generic/session_aliases--Results] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Results] >> test.py::test[join-mergejoin_with_reverse_key_order--Results] [GOOD] >> test.py::test[join-order_of_qualified-off-ForceBlocks] >> test.py::test[select-backtick_with_escapes-default.txt-Results] [GOOD] >> 
test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Results] [GOOD] >> test.py::test[key_filter-is_null--Results] >> test.py::test[select-discard-default.txt-ForceBlocks] >> test.py::test[join-pullup_random--ForceBlocks] [GOOD] >> test.py::test[join-pullup_random--Results] >> test.py::test[view-file_eval--Results] [GOOD] >> test.py::test[view-secure_eval_dyn--Results] >> test.py::test[distinct-distinct_groupby-default.txt-Results] [GOOD] >> test.py::test[epochs-use_and_drop_anonymous--ForceBlocks] [SKIPPED] >> test.py::test[epochs-use_and_drop_anonymous--Results] [SKIPPED] >> test.py::test[expr-len--ForceBlocks] >> test.py::test[pg-tpcds-q78-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-ForceBlocks] |84.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[order_by-extract_members_over_sort_desc--Results] [GOOD] >> test.py::test[schema-user_schema_patch_columns--Results] [GOOD] >> test.py::test[select-result_rows_limit--Results] [SKIPPED] >> test.py::test[select-substring-default.txt-Results] >> test.py::test[insert_monotonic-to_empty--Results] [GOOD] >> test.py::test[join-aggr_diff_order-default.txt-Results] >> test.py::test[order_by-literal_single_item_sort--Results] [GOOD] >> test.py::test[order_by-native_desc_sort--Results] [SKIPPED] >> test.py::test[order_by-order_by_dynum-default.txt-Results] >> test.py::test[file-file_constness--Results] [GOOD] >> test.py::test[flatten_by-flatten_and_where--Results] >> test.py::test[aggregate-aggregate_key_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Results] >> test.py::test[in-in_immediate_subquery-default.txt-Results] [GOOD] >> test.py::test[binding-anon_table_binding-default.txt-ForceBlocks] >> test.py::test[select-exists_with_table-default.txt-Results] [GOOD] >> test.py::test[select-optional_in_job--Results] >> test.py::test[key_filter-decimal--Results] [GOOD] >> test.py::test[key_filter-dict_contains-default.txt-Results] >> test.py::test[view-secure_eval_dyn--Results] [GOOD] >> test.py::test[produce-process_with_udf_rows-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_opt-default.txt-Results] >> test.py::test[view-view_with_lambda--Results] >> test.py::test[join-yql-4275--Results] [GOOD] >> test.py::test[key_filter-datetime-default.txt-Results] >> test.py::test[join-join_without_correlation_and_dict_access--Results] [GOOD] >> test.py::test[join-left_only_semi_and_other-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_subst-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o-off-Results] >> test.py::test[join-lookupjoin_inner_2o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_2o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_empty--Results] >> test.py::test[action-eval_range--Results] [GOOD] >> test.py::test[action-eval_skip_take--Results] >> test.py::test[in-in_sorted_by_tuple--ForceBlocks] [GOOD] >> test.py::test[in-in_sorted_by_tuple--Results] >> test.py::test[join-pullup_random--Results] [GOOD] >> test.py::test[join-star_join_inners--ForceBlocks] |84.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[in-in_immediate_subquery-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Results] [GOOD] >> 
test.py::test[order_by-order_by_mul_columns-default.txt-ForceBlocks] >> test_disposition.py::TestContinueMode::test_disposition_fresh[v1-mvp_external_ydb_endpoint0] [GOOD] >> test.py::test[union_all-union_all_multiin--Results] [GOOD] >> test.py::test[union_all-union_all_subexpr-default.txt-Results] >> test.py::test[join-left_all--Results] >> test.py::test[key_filter-is_null--Results] [GOOD] >> test.py::test[key_filter-mixed_sort--Results] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_pg--Results] [GOOD] >> test.py::test[blocks-combine_all_some_filter--Results] >> test.py::test[join-order_of_qualified-off-ForceBlocks] [GOOD] >> test.py::test[join-order_of_qualified-off-Results] [SKIPPED] >> test.py::test[join-premap_map_cross-off-ForceBlocks] >> test.py::test[select-substring-default.txt-Results] [GOOD] >> test.py::test[select-tablepathprefix-default.txt-Results] >> test.py::test[pg-tpcds-q85-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-Results] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-Results] [GOOD] >> test.py::test[insert-override-with_read_udf-Results] >> test.py::test[expr-len--ForceBlocks] [GOOD] >> test.py::test[expr-len--Results] >> test.py::test[pg-select_alias_partial-default.txt-Results] [GOOD] >> test.py::test[pg-select_subquery-default.txt-Results] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt-Results] [GOOD] >> test.py::test[pg-join_using_multiple2--Results] [GOOD] >> test.py::test[pg-name--Results] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-Results] >> test.py::test[blocks-interval_mul--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_mul--Results] >> test.py::test[window-leading/aggregations--Results] [GOOD] >> test.py::test[window-rank/opt--Results] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-histogram-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-histogram-default.txt-Results] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Results] |84.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_dynum-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_dynum_desc-default.txt-Results] >> test.py::test[pg-tpcds-q85-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_semi_empty--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite--Results] >> test.py::test[binding-anon_table_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-anon_table_binding-default.txt-Results] >> test.py::test[expr-len--Results] [GOOD] >> test.py::test[expr-yql-10180-default.txt-ForceBlocks] >> test.py::test[produce-reduce_all_opt-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_with_python_input_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in--Results] >> test.py::test[key_filter-dict_contains-default.txt-Results] [GOOD] >> test.py::test[key_filter-dict_contains_optional--Results] >> test.py::test[view-view_with_lambda--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_combine--Results] >> test.py::test[in-in_sorted_by_tuple--Results] [GOOD] >> test.py::test[insert-double_append_to_anonymous--ForceBlocks] >> 
test.py::test[order_by-order_by_mul_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_mul_columns-default.txt-Results] >> test.py::test[window-current/ansi_current_mixed--Results] [GOOD] >> test.py::test[window-current/session_extended--Results] >> test.py::test[join-aggr_diff_order-default.txt-Results] [GOOD] >> test.py::test[join-alias_where_group-off-Results] [SKIPPED] >> test.py::test[join-anyjoin_common_dup-off-Results] [SKIPPED] >> test.py::test[join-anyjoin_common_nodata_keys--Results] >> test.py::test[blocks-pg_to_strings--Results] >> test.py::test[binding-anon_table_binding-default.txt-Results] [GOOD] >> test.py::test[binding-bind_select-default.txt-ForceBlocks] >> test.py::test[select-optional_in_job--Results] [GOOD] >> test.py::test[select-optional_pull--Results] >> test.py::test[flatten_by-flatten_and_where--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--Results] >> test.py::test[blocks-interval_mul--Results] [GOOD] >> test.py::test[column_group-hint_append_fail-diff_grp-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_append_fail-diff_grp-Results] [SKIPPED] >> test.py::test[aggr_factory-histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-hll-default.txt-ForceBlocks] >> test.py::test[action-eval_skip_take--Results] [GOOD] >> test.py::test[action-insert_after_eval--Results] >> test.py::test[union_all-union_all_subexpr-default.txt-Results] [GOOD] >> test.py::test[view-secure_eval--Results] >> test.py::test[join-star_join_inners--ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners--Results] >> test.py::test[select-tablepathprefix-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-Results] >> test.py::test[insert-override-with_read_udf-Results] [GOOD] >> test.py::test[insert-select_after_insert_relabeled-default.txt-Results] >> test.py::test[order_by-order_by_mul_columns-default.txt-Results] [GOOD] >> test.py::test[order_by-ordered_fill--ForceBlocks] >> test.py::test[pg-select_subquery-default.txt-Results] [GOOD] >> test.py::test[pg-select_subquery2-default.txt-Results] >> test.py::test[blocks-combine_all_some_filter--Results] [GOOD] >> test.py::test[blocks-date_less_or_equal_scalar--Results] >> test.py::test[pg-name--Results] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-Results] |84.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[column_group-hint_append_fail-diff_grp-Results] [SKIPPED] >> test.py::test[select-discard-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-discard-default.txt-Results] >> test.py::test[window-generic/session_aliases--Results] [GOOD] >> test.py::test[window-presort_window_partition_by_table-default.txt-Results] >> test.py::test[join-premap_map_cross-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_map_cross-off-Results] >> test.py::test[join-premap_map_cross-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_extrasort1--ForceBlocks] >> test.py::test[in-in_ansi_join--Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-ForceBlocks] >> test.py::test[key_filter-mixed_sort--Results] [GOOD] >> test.py::test[library-package_override--Results] [SKIPPED] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Results] >> test.py::test[tpch-q11-default.txt-Results] >> test.py::test[join-left_all--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_optional--Results] >> 
test.py::test[view-secure_eval--Results] [GOOD] >> test.py::test[view-trivial_view--Results] >> test.py::test[order_by-order_by_dynum_desc-default.txt-Results] [GOOD] >> test.py::test[expr-yql-10180-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-yql-10180-default.txt-Results] |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test.py::test[key_filter-dict_contains_optional--Results] [GOOD] >> test.py::test[key_filter-is_null_with_condition--Results] >> test.py::test[insert-double_append_to_anonymous--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_with_anonymous-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_struct-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_big_primary--Results] >> test.py::test[binding-bind_select-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-bind_select-default.txt-Results] |84.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[order_by-order_by_dynum_desc-default.txt-Results] [GOOD] >> test.py::test[blocks-date_greater_or_equal--Results] [GOOD] >> test.py::test[blocks-date_not_equals_scalar--Results] >> test.py::test[join-star_join_inners--Results] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted-off-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_disposition.py::TestContinueMode::test_disposition_fresh[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=338237) is multi-threaded, use of fork() may lead to deadlocks in the child. 
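The DeprecationWarning above and the ResourceWarning lines that follow come from the test process itself, not from ydb: on CPython 3.12+ calling fork() in a multi-threaded process is deprecated because locks held by other threads can deadlock the child, and unclosed sockets only get a useful allocation traceback when tracemalloc is enabled. The following is a minimal, self-contained Python sketch of how such warnings can be reproduced, diagnosed, or avoided; it is illustrative only, is not part of the ydb/ya test harness, and the names worker/main are hypothetical.

import multiprocessing as mp
import socket
import threading
import tracemalloc
import warnings

def worker() -> None:
    # Child process body; trivial on purpose.
    print("child ok")

def main() -> None:
    # Make ResourceWarnings visible and give them an allocation traceback
    # instead of the bare "Enable tracemalloc ..." hint seen in the log.
    tracemalloc.start()
    warnings.simplefilter("always", ResourceWarning)

    # A long-lived background thread keeps this process multi-threaded,
    # which is what triggers the fork() DeprecationWarning on CPython 3.12+
    # when the default fork start method is used on Linux.
    threading.Thread(target=threading.Event().wait, daemon=True).start()

    # Spawning (instead of forking) a child from a multi-threaded parent
    # avoids the deadlock risk the warning describes and silences it.
    ctx = mp.get_context("spawn")
    p = ctx.Process(target=worker)
    p.start()
    p.join()

    # A socket left unclosed would emit the "unclosed" ResourceWarning at
    # garbage-collection time; closing it explicitly keeps the run clean.
    s = socket.socket()
    s.close()

if __name__ == "__main__":
    main()

Under the spawn start method the child interpreter starts fresh, so the target must be an importable, picklable callable and startup is slower than with fork; the upside is that none of the parent's threads or held locks are inherited, which is exactly the failure mode the warning above is about.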
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:06:03] send response localhost:28095/?database=local ::1 - - [05/May/2025 03:06:03] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> test.py::test[flatten_by-flatten_by_typed_table--Results] [GOOD] >> test.py::test[flatten_by-flatten_dict_by_opt--Results] >> test.py::test[weak_field-optimize_weak_fields_combine--Results] [GOOD] >> test.py::test[expr-yql-10180-default.txt-Results] [GOOD] >> test.py::test[hor_join-empty_out_hor_join-default.txt-ForceBlocks] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-Results] |84.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[insert-double_append_to_anonymous--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--Results] >> test.py::test[action-insert_after_eval--Results] [GOOD] >> test.py::test[action-parallel_for-default.txt-Results] >> test.py::test[select-optional_pull--Results] [GOOD] >> test.py::test[select-reuse_named_node-default.txt-Results] >> test.py::test[insert-select_after_insert_relabeled-default.txt-Results] [GOOD] >> test.py::test[insert-select_subquery--Results] >> test.py::test[binding-bind_select-default.txt-Results] [GOOD] >> test.py::test[binding-drop_binding--ForceBlocks] >> test.py::test[pg-tpch-q18-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-Results] >> test.py::test[blocks-pg_to_strings--Results] [GOOD] >> test.py::test[blocks-sub_uint64_opt2--Results] >> test.py::test[schema-read_schema_other--Results] >> test.py::test[order_by-ordered_fill--ForceBlocks] [GOOD] >> test.py::test[order_by-ordered_fill--Results] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-3.test] |84.4%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part3/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[join-premap_merge_extrasort1--ForceBlocks] [GOOD] >> test.py::test[pg-select_subquery2-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-Results] >> test.py::test[view-trivial_view--Results] [GOOD] >> test.py::test[view-view_with_lambda_process--Results] >> test.py::test[join-premap_merge_extrasort1--Results] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-compare_by_nulls-default.txt-Results] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] >> test.py::test[pg-select_qstarref2-default.txt-Results] [GOOD] >> test.py::test[pg-select_starref2-default.txt-Results] >> test.py::test[select-discard-default.txt-Results] [GOOD] >> test.py::test[select-refselect-1000-ForceBlocks] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Results] [GOOD] >> test.py::test[limit-limit-dynamic-Results] >> test.py::test[key_filter-is_null_with_condition--Results] [GOOD] >> test.py::test[key_filter-no_bypass_merge--Results] [SKIPPED] >> test.py::test[key_filter-yql-8117-table_key_filter--Results] >> test.py::test[window-presort_window_partition_by_table-default.txt-Results] [GOOD] >> test.py::test[window-udaf_window--Results] >> test.py::test[order_by-ordered_fill--Results] [GOOD] >> test.py::test[order_by-warn_offset_wo_sort--ForceBlocks] >> test.py::test[aggr_factory-hll-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-hll-default.txt-Results] >> test_generator.py::TestTpcdsGenerator::test_s1_state_and_parts [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys--Results] [GOOD] >> test.py::test[join-bush_dis_in--Results] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Results] >> test.py::test[window-rank/opt--Results] [GOOD] >> test.py::test[window-row_number_to_map_noncompact-default.txt-Results] >> test.py::test[order_by-literal--Results] >> test.py::test[join-left_join_right_pushdown_optional--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_take_skip--Results] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_any_no_join_reduce-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align1--Results] [SKIPPED] >> test.py::test[join-mergejoin_semi_composite_to_inner-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nested--Results] >> test.py::test[join-premap_merge_extrasort1--Results] [GOOD] >> test.py::test[join-premap_merge_extrasort1-off-ForceBlocks] >> test.py::test[select-reuse_named_node-default.txt-Results] [GOOD] >> test.py::test[select-sample_limit_recordindex--Results] >> test.py::test[window-current/session_extended--Results] [GOOD] >> test.py::test[window-full/noncompact_with_nulls--Results] >> test.py::test[binding-drop_binding--ForceBlocks] [GOOD] >> test.py::test[binding-drop_binding--Results] >> test.py::test[blocks-sub_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-tuple_nth--Results] |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_state_and_parts [GOOD] >> test.py::test[insert-select_subquery--Results] [GOOD] >> 
test.py::test[insert-yql-13083-existig-Results] >> test.py::test[aggregate-subquery_aggregation--Results] >> test.py::test[tpch-q11-default.txt-Results] [GOOD] >> test.py::test[tpch-q20-default.txt-ForceBlocks] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] >> test.py::test[hor_join-empty_out_hor_join-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-empty_out_hor_join-default.txt-Results] >> test.py::test[join-mergejoin_big_primary--Results] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique--Results] >> test.py::test[schema-read_schema_other--Results] [GOOD] >> test.py::test[schema-select_all-read_schema-Results] >> test.py::test[action-parallel_for-default.txt-Results] [GOOD] >> test.py::test[action-pending_arg_fail--Results] >> test.py::test[pg-tpcds-q16-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q17-default.txt-Results] >> test.py::test[join-star_join_inners_vk_sorted-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted-off-Results] [SKIPPED] >> test.py::test[join-trivial_view--ForceBlocks] >> test.py::test[binding-drop_binding--Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-ForceBlocks] [GOOD] >> test.py::test[binding-named_node_corr_names-default.txt-ForceBlocks] >> test.py::test[insert-append_sorted-to_sorted_calc-Results] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-ForceBlocks] >> test.py::test[view-view_with_lambda_process--Results] [GOOD] >> test.py::test[weak_field-weak_field--Results] >> test.py::test[aggregate-group_by_rollup_grouping--Results] >> test.py::test[produce-reduce_multi_in_keytuple--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_presort--Results] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_mapjoin-default.txt-Results] [SKIPPED] >> test.py::test[sampling-subquery_sort-default.txt-Results] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Results] [GOOD] >> test.py::test[aggregate-compare_by--Results] >> test.py::test[blocks-pg_to_interval--Results] >> test.py::test[limit-limit-dynamic-Results] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Results] >> test.py::test[flatten_by-flatten_dict_by_opt--Results] [GOOD] >> test.py::test[flatten_by-flatten_one_field_another--Results] |84.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] [GOOD] >> test.py::test[aggr_factory-hll-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-ForceBlocks] |84.4%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part7/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int32-pk_types20-all_types20-index20---] [GOOD] >> test.py::test[select-refselect-1000-ForceBlocks] [GOOD] >> test.py::test[order_by-literal--Results] [GOOD] >> test.py::test[select-refselect-1000-Results] >> test.py::test[order_by-native_desc_assume_with_transform--Results] [SKIPPED] >> test.py::test[order_by-native_desc_sort_with_limit--Results] [SKIPPED] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-Results] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] >> test.py::test[insert-append_sorted-to_sorted_calc-Results] [GOOD] >> test.py::test[insert-select_relabel-default.txt-ForceBlocks] >> test.py::test[action-pending_arg_fail--Results] [GOOD] >> test.py::test[aggr_factory-count-default.txt-Results] >> test.py::test[order_by-warn_offset_wo_sort--ForceBlocks] [GOOD] >> test.py::test[order_by-warn_offset_wo_sort--Results] >> test.py::test[hor_join-empty_out_hor_join-default.txt-Results] [GOOD] >> test.py::test[hor_join-fuse_multi_outs1-outlimit-ForceBlocks] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs1-outlimit-Results] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs2--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs2--Results] [SKIPPED] |84.4%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part3/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[window-udaf_window--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part--Results] >> test.py::test[pg-select_starref2-default.txt-Results] [GOOD] >> test.py::test[pg-select_unionall_self-default.txt-Results] >> test.py::test[select-refselect-1000-Results] [GOOD] >> test.py::test[select-swap_columns-default.txt-ForceBlocks] >> test.py::test[schema-select_all-read_schema-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_range--Results] >> test.py::test[key_filter-yql-8117-table_key_filter--Results] [GOOD] >> test.py::test[key_filter-yql-8663-dedup_ranges--Results] |84.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[hor_join-fuse_multi_outs2--Results] [SKIPPED] >> test.py::test[join-premap_merge_extrasort1-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_extrasort1-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_inner-off-ForceBlocks] >> test.py::test[order_by-warn_offset_wo_sort--Results] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q17-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q38-default.txt-Results] >> test.py::test[select-sample_limit_recordindex--Results] [GOOD] >> test.py::test[select-select_all_group_by_column--Results] >> test.py::test[aggregate-compare_by_nulls-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_alias_on_subexp--Results] >> test.py::test[binding-named_node_corr_names-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-named_node_corr_names-default.txt-Results] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] >> test.py::test[join-mergejoin_big_primary_unique--Results] [GOOD] >> test.py::test[join-mergejoin_force_align1-off-Results] [SKIPPED] |84.4%| [TA] $(B)/ydb/tests/functional/benchmarks_init/test-results/py3test/{meta.json ... 
results_accumulator.log} |84.4%| [TA] {RESULT} $(B)/ydb/tests/functional/benchmarks_init/test-results/py3test/{meta.json ... results_accumulator.log} >> test.py::test[join-trivial_view--ForceBlocks] [GOOD] >> test.py::test[join-trivial_view--Results] >> test.py::test[window-row_number_to_map_noncompact-default.txt-Results] [GOOD] >> test.py::test[blocks-tuple_nth--Results] [GOOD] >> test.py::test[column_group-hint-disable-Results] [SKIPPED] >> test.py::test[column_group-hint_empty_grp_fail--Results] [SKIPPED] >> test.py::test[column_group-hint_non_str_yson_fail--Results] [SKIPPED] >> test.py::test[column_order-select_limit_offset-default.txt-Results] >> test.py::test[insert-yql-13083-existig-Results] [GOOD] >> test.py::test[insert_monotonic-from_empty--Results] >> test.py::test[window-win_extract_members-default.txt-Results] >> test.py::test[weak_field-weak_field--Results] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-Results] >> test.py::test[aggregate-subquery_aggregation--Results] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-Results] >> test.py::test[blocks-pg_to_interval--Results] [GOOD] >> test.py::test[blocks-sort_one_asc--Results] >> test.py::test[join-mergejoin_with_different_key_names_nested--Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--Results] >> test.py::test[sampling-subquery_sort-default.txt-Results] [GOOD] >> test.py::test[schema-insert-read_schema-Results] >> test.py::test[tpch-q20-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q20-default.txt-Results] >> test.py::test[binding-named_node_corr_names-default.txt-Results] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-ForceBlocks] >> test.py::test[join-equi_join_three_asterisk--Results] >> test.py::test[pg-select_unionall_self-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q08-default.txt-Results] |84.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[join-mergejoin_force_align1-off-Results] [SKIPPED] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Results] [GOOD] >> test.py::test[lineage-error_type--Results] [SKIPPED] >> test.py::test[lineage-pullup_rename--Results] [SKIPPED] >> test.py::test[lineage-scalar_context--Results] [SKIPPED] >> test.py::test[lineage-select_mix_fields-default.txt-Results] [SKIPPED] >> test.py::test[lineage-topsort-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-keepworld_emptyflatmap--Results] |84.5%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part7/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[key_filter-datetime-default.txt-Results] [GOOD] >> test.py::test[key_filter-yql-14157--Results] >> test.py::test[produce-reduce_multi_in_presort--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] >> test.py::test[insert-select_relabel-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-select_relabel-default.txt-Results] >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] >> test.py::test[blocks-date_not_equals_scalar--Results] [GOOD] >> test.py::test[blocks-div_uint64_opt2--Results] >> test.py::test[select-swap_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-swap_columns-default.txt-Results] >> test.py::test[join-trivial_view--Results] [GOOD] >> test.py::test[join-trivial_view-off-ForceBlocks] >> test.py::test[aggr_factory-count-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-variance-default.txt-Results] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Results] >> test.py::test[pg-tpcds-q38-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q39-default.txt-Results] >> test.py::test[flatten_by-flatten_one_field_another--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--Results] >> test.py::test[schema-select_all_inferschema_range--Results] [GOOD] >> test.py::test[schema-select_fields_inferschema--Results] >> test.py::test[pg-select_columnref1-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-Results] |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int32-pk_types20-all_types20-index20---] [GOOD] >> test.py::test[key_filter-yql-8663-dedup_ranges--Results] [GOOD] >> test.py::test[lambda-lambda_simple-default.txt-Results] >> test.py::test[blocks-date_less_or_equal_scalar--Results] [GOOD] >> test.py::test[blocks-date_sub_interval--Results] >> test.py::test[window-win_func_aggr_4func_no_part--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort_desc--Results] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey--Results] >> test.py::test[insert-select_relabel-default.txt-Results] [GOOD] >> test.py::test[insert-unique_distinct_hints--ForceBlocks] >> test.py::test[join-premap_merge_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_nonseq_flatmap--ForceBlocks] >> test.py::test[select-swap_columns-default.txt-Results] [GOOD] >> test.py::test[select-to_dict-default.txt-ForceBlocks] >> test.py::test[tpch-q20-default.txt-Results] [GOOD] >> test.py::test[tpch-q9-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_hop_star--ForceBlocks] >> test.py::test[insert_monotonic-from_empty--Results] [GOOD] >> test.py::test[insert_monotonic-overlaping_fail--Results] [SKIPPED] >> test.py::test[insert_monotonic-truncate_and_append-default.txt-Results] >> test.py::test[window-full/noncompact_with_nulls--Results] [GOOD] >> test.py::test[window-generic/aggregations_after_current--Results] >> test.py::test[ansi_idents-order_by-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Results] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] 
[GOOD] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] >> test.py::test[select-select_all_group_by_column--Results] [GOOD] >> test.py::test[select-struct_access_without_table_name--Results] >> test.py::test[in-in_noansi_join--Results] [GOOD] >> test.py::test[in-in_scalar_vector_subquery-default.txt-Results] >> test.py::test[join-bush_dis_in--Results] [GOOD] >> test.py::test[join-bush_dis_in_in--Results] >> test.py::test[pg-select_columnref1-default.txt-Results] [GOOD] >> test.py::test[pg-select_where-default.txt-ForceBlocks] >> test.py::test[schema-insert-read_schema-Results] [GOOD] >> test.py::test[schema-other_job--Results] [SKIPPED] >> test.py::test[schema-patchtype--Results] >> test.py::test[pg-tpcds-q08-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[column_order-select_limit_offset-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt-ForceBlocks] >> test.py::test[column_order-select_orderby-default.txt-Results] >> test.py::test[pg-tpcds-q18-default.txt-Results] >> test.py::test[blocks-sort_one_asc--Results] [GOOD] >> test.py::test[blocks-string_pass--Results] >> test.py::test[aggregate-group_by_expr_alias_on_subexp--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_and_having--Results] >> test.py::test[action-eval_atom_wrong_type_param--ForceBlocks] [SKIPPED] >> test.py::test[action-eval_atom_wrong_type_param--Results] [SKIPPED] >> test.py::test[action-evaluate_match_type-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q21-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-Results] >> test.py::test[optimizers-keepworld_emptyflatmap--Results] [GOOD] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Results] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Results] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Results] >> test.py::test[pg-tpcds-q39-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q51-default.txt-Results] >> test.py::test[blocks-div_uint64_opt2--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_with_filter--Results] >> test.py::test[blocks-filter_direct_col--Results] >> test.py::test[schema-select_fields_inferschema--Results] [GOOD] >> test.py::test[schema-user_schema_directread-default.txt-Results] >> test.py::test[aggregate-compare_by--Results] [GOOD] >> test.py::test[aggregate-compare_tuple--Results] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Results] [GOOD] >> test.py::test[lambda-lambda_simple-default.txt-Results] [GOOD] >> test.py::test[window-rank/unordered--Results] >> test.py::test[join-trivial_view-off-ForceBlocks] [GOOD] >> test.py::test[join-trivial_view-off-Results] [SKIPPED] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-ForceBlocks] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--ForceBlocks] >> 
test.py::test[join-mergejoin_with_different_key_names_nonsorted--Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename-off-Results] >> test.py::test[window-win_extract_members-default.txt-Results] [GOOD] >> test.py::test[window-win_func_auto_arg-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names_norename-off-Results] [SKIPPED] >> test.py::test[join-nopushdown_filter_over_inner--Results] |84.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[lambda-lambda_simple-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey--Results] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--Results] >> test.py::test[pg-select_where-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_where-default.txt-Results] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] [GOOD] >> test.py::test[produce-reduce_with_assume_in_subquery--Results] [SKIPPED] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] >> test.py::test[distinct-distinct_list_after_group-default.txt-Results] >> test.py::test[select-to_dict-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-to_dict-default.txt-Results] >> test.py::test[aggregate-group_by_hop_star--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_star--Results] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_aggr_expr--ForceBlocks] >> test.py::test[insert_monotonic-truncate_and_append-default.txt-Results] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys-off-Results] [SKIPPED] >> test.py::test[join-anyjoin_common_nodup-off-Results] >> test.py::test[join-premap_nonseq_flatmap--ForceBlocks] [GOOD] >> test.py::test[column_order-select_orderby-default.txt-Results] [GOOD] >> test.py::test[column_order-select_plain_nosimple-default.txt-Results] >> test.py::test[select-struct_access_without_table_name--Results] [GOOD] >> test.py::test[select-trivial_between-default.txt-Results] >> test.py::test[key_filter-yql-14157--Results] [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-Results] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Results] [GOOD] >> test.py::test[table_range-range_over_regexp--Results] >> test.py::test[join-anyjoin_common_nodup-off-Results] [SKIPPED] >> test.py::test[join-bush_in--Results] >> test.py::test[pg-tpcds-q18-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q32-default.txt-Results] >> test.py::test[key_filter-string_with-default.txt-Results] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort_desc--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--Results] >> test.py::test[schema-patchtype--Results] [GOOD] >> test.py::test[schema-remap_desc--Results] >> test.py::test[blocks-string_pass--Results] [GOOD] >> test.py::test[blocks-top_sort_two_desc--Results] >> test.py::test[blocks-date_sub_interval--Results] [GOOD] >> test.py::test[blocks-decimal_op_decimal_scalar--Results] >> test.py::test[pg-select_where-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-ForceBlocks] >> test.py::test[aggregate-agg_phases_table3-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt-Results] >> test.py::test[tpch-q9-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q9-default.txt-Results] >> 
test.py::test[action-evaluate_match_type-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q51-default.txt-Results] [GOOD] >> test.py::test[action-evaluate_match_type-default.txt-Results] |84.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[join-premap_nonseq_flatmap--ForceBlocks] [GOOD] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Results] [GOOD] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-Results] >> test.py::test[blocks-filter_direct_col--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_and_having--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_with_join--Results] |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [GOOD] >> test.py::test[blocks-interval_div_scalar--Results] >> test.py::test[join-equi_join_three_asterisk--Results] [GOOD] >> test.py::test[join-flatten_columns1-off-Results] [SKIPPED] >> test.py::test[join-inner_grouped--Results] >> test.py::test[schema-user_schema_directread-default.txt-Results] [GOOD] >> test.py::test[schema-user_schema_with_sort--Results] >> test.py::test[select-to_dict-default.txt-Results] [GOOD] >> test.py::test[select-trivial_between-default.txt-ForceBlocks] >> test.py::test[flatten_by-flatten_with_group_by--Results] [GOOD] >> test.py::test[hor_join-fuse_multi_outs1-outlimit-Results] [SKIPPED] >> test.py::test[hor_join-max_outtables--Results] [SKIPPED] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Results] [GOOD] >> test.py::test[binding-bind_select-default.txt-Results] >> test.py::test[insert-unique_distinct_hints--ForceBlocks] [GOOD] >> test.py::test[insert-unique_distinct_hints--Results] >> test.py::test[aggr_factory-variance-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Results] |84.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[pg-tpcds-q51-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--Results] >> test.py::test[action-evaluate_match_type-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt-ForceBlocks] |84.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[hor_join-max_outtables--Results] [SKIPPED] >> test.py::test[table_range-range_over_regexp--Results] [GOOD] >> test.py::test[table_range-range_slash--Results] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--Results] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-Results] >> test.py::test[tpch-q9-default.txt-Results] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs2--ForceBlocks] >> test.py::test[pg-tpcds-q32-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q36-default.txt-Results] >> test.py::test[pg-tpch-q21-default.txt-Results] [GOOD] >> test.py::test[produce-process_rows_sorted_desc_multi_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_rows_sorted_desc_multi_out--Results] [SKIPPED] >> test.py::test[produce-reduce_all_list-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_all_list-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_by_struct-default.txt-ForceBlocks] >> 
test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_python_having--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_input_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_row_repack--Results] [SKIPPED] >> test.py::test[ql_filter-integer_members--Results] >> test.py::test[limit-empty_read_after_limit-default.txt-Results] [GOOD] >> test.py::test[lineage-grouping_sets--Results] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Results] >> test.py::test[aggregate-agg_phases_table3-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-ForceBlocks] >> test.py::test[insert-unique_distinct_hints--Results] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys--ForceBlocks] >> test.py::test[select-trivial_between-default.txt-Results] [GOOD] >> test.py::test[select-trivial_where-many-Results] >> test.py::test[key_filter-string_with-default.txt-Results] [GOOD] >> test.py::test[key_filter-yql-19420--Results] >> test.py::test[blocks-combine_all_max_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_min--ForceBlocks] >> test.py::test[schema-remap_desc--Results] [GOOD] >> test.py::test[schema-user_schema_override--Results] >> test.py::test[blocks-decimal_op_decimal_scalar--Results] [GOOD] >> test.py::test[blocks-minmax_strings_filter--Results] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test.py::test[column_order-select_plain_nosimple-default.txt-Results] [GOOD] >> test.py::test[column_order-select_subquery-default.txt-Results] >> test.py::test[join-bush_dis_in_in--Results] [GOOD] >> test.py::test[join-bush_in-off-Results] [SKIPPED] >> test.py::test[join-do_not_suppres_equijoin_input_sorts--Results] [SKIPPED] >> test.py::test[blocks-top_sort_two_desc--Results] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-Results] >> test.py::test[window-generic/aggregations_after_current--Results] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Results] >> test.py::test[window-presort_window_order_by_table-default.txt-Results] >> test.py::test[binding-bind_select-default.txt-Results] [GOOD] >> test.py::test[binding-drop_binding--Results] >> test.py::test[join-nopushdown_filter_over_inner--Results] [GOOD] >> test.py::test[join-pullup_extra_columns--Results] >> test.py::test[aggregate-group_by_rollup_with_filter--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--Results] >> test.py::test[aggregate-compare_tuple--Results] [GOOD] >> test.py::test[aggregate-ensure_count-default.txt-Results] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-Results] [GOOD] >> test.py::test[optimizers-sorted_sql_in--Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_group--Results] >> test.py::test[blocks-interval_div_scalar--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--Results] >> test.py::test[aggregate-group_by_rollup_aggr_expr--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_aggr_expr--Results] >> test.py::test[window-rank/unordered--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func--Results] >> test.py::test[select-trivial_between-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-trivial_between-default.txt-Results] |84.5%| 
[TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[join-do_not_suppres_equijoin_input_sorts--Results] [SKIPPED] >> test.py::test[schema-user_schema_with_sort--Results] [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt-Results] >> test.py::test[window-win_func_auto_arg-default.txt-Results] [GOOD] >> test.py::test[window-win_func_first_last_rev--Results] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Results] [GOOD] >> test.py::test[key_filter-complex-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_min_filter_opt--ForceBlocks] >> test.py::test[pg-tpcds-q05-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-ForceBlocks] >> test.py::test[join-bush_in--Results] [GOOD] >> test.py::test[distinct-distinct_list_after_group-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_union_all-default.txt-Results] >> test.py::test[pg-select_columnref1-default.txt-Results] [GOOD] >> test.py::test[pg-select_common_type_unionall--Results] >> test.py::test[table_range-range_slash--Results] [GOOD] >> test.py::test[table_range-range_tables_with_view--Results] >> test.py::test[ql_filter-integer_members--Results] [GOOD] >> test.py::test[ql_filter-integer_single_disable_prune--Results] >> test.py::test[action-subquery_merge_nested_world-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt-Results] >> test.py::test[in-in_scalar_vector_subquery-default.txt-Results] [GOOD] >> test.py::test[insert-insert_null-default.txt-Results] >> test.py::test[udf-named_args_for_script_with_posargs2--ForceBlocks] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs2--Results] >> test.py::test[aggregate-group_by_expr_with_join--Results] [GOOD] >> test.py::test[aggregate-group_by_full_path-default.txt-Results] |84.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[join-bush_in--Results] [GOOD] >> test.py::test[select-trivial_where-many-Results] [GOOD] >> test.py::test[select-trivial_where-one-Results] >> test.py::test[join-inner_grouped--Results] [GOOD] >> test.py::test[join-inner_grouped_by_expr--Results] >> test.py::test[pg-join_using_tables2-default.txt-Results] >> test.py::test[action-eval_column--Results] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--Results] [GOOD] >> test.py::test[window-win_func_order_by_udf_empty_rank--Results] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--Results] >> test.py::test[select-trivial_between-default.txt-Results] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt-ForceBlocks] >> test.py::test[schema-user_schema_override--Results] [GOOD] >> test.py::test[binding-drop_binding--Results] [GOOD] >> test.py::test[blocks-add_uint64_opt2--Results] >> test.py::test[pg-tpcds-q36-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q47-default.txt-Results] >> test.py::test[column_order-select_subquery-default.txt-Results] [GOOD] >> test.py::test[dq-blacklisted_pragmas--Results] [SKIPPED] >> test.py::test[dq-precompute_result-default.txt-Results] [SKIPPED] >> test.py::test[dq-read_cost-default.txt-Results] [SKIPPED] >> test.py::test[expr-empty_iterator2--Results] >> test.py::test[produce-reduce_by_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_by_struct-default.txt-Results] >> 
test.py::test[aggregate-ensure_count-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_column-default.txt-Results] >> test.py::test[join-anyjoin_common_nodata_keys--ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys--Results] >> test.py::test[case-case_size_eq_cast-default.txt-Results] [GOOD] >> test.py::test[coalesce-coalesce--Results] >> test.py::test[action-subquery_merge_nested_world-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-count_if-default.txt-ForceBlocks] >> test.py::test[udf-named_args_for_script_with_posargs2--Results] [GOOD] >> test.py::test[udf-python_script--ForceBlocks] |84.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[schema-user_schema_override--Results] [GOOD] >> test.py::test[blocks-combine_all_min--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_min--Results] >> test.py::test[join-pullup_extra_columns--Results] [GOOD] >> test.py::test[join-star_join_inners_premap--Results] >> test.py::test[select-exists_with_table-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_rollup_aggr_expr--Results] [GOOD] >> test.py::test[aggregate-group_by_session_extended_subset--ForceBlocks] >> test.py::test[table_range-range_tables_with_view--Results] [GOOD] >> test.py::test[table_range-tablepath_with_non_existing--Results] >> test.py::test[window-presort_window_order_by_table-default.txt-Results] [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt-Results] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_ref--Results] >> test.py::test[select-backtick_with_escapes-default.txt-Results] [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt-Results] >> test.py::test[key_filter-yql-19420--Results] [GOOD] >> test.py::test[library-package--Results] [SKIPPED] >> test.py::test[lineage-if_struct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-isolated-default.txt-Results] [SKIPPED] >> test.py::test[lineage-list_literal1-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_field_filter-default.txt-Results] [SKIPPED] >> test.py::test[lineage-some_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[lineage-union_all_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[lineage-unused_columns-default.txt-Results] >> test.py::test[lineage-unused_columns-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-field_subset_for_multiusage--Results] >> test.py::test[pg-tpcds-q16-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_min_filter_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_min_filter_opt--Results] >> test.py::test[produce-reduce_by_struct-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_python--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python--Results] >> test.py::test[window-win_func_first_last_rev--Results] [GOOD] >> test.py::test[window-win_func_first_last_with_part--Results] >> test.py::test[insert-insert_null-default.txt-Results] [GOOD] >> test.py::test[insert-keepmeta--Results] >> test.py::test[produce-reduce_with_python--Results] [SKIPPED] >> test.py::test[ql_filter-integer_select_other--ForceBlocks] >> test.py::test[ql_filter-integer_single_disable_prune--Results] [GOOD] >> test.py::test[result_types-data-default.txt-Results] >> test.py::test[select-trivial_where-one-Results] [GOOD] >> 
test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-Results] >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] |84.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[pg-tpcds-q16-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_min--Results] [GOOD] >> test.py::test[blocks-combine_hashed_count--ForceBlocks] >> test.py::test[blocks-minmax_strings_filter--Results] [GOOD] >> test.py::test[blocks-not_opt--Results] >> test.py::test[blocks-add_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-block_input_various_types--Results] [SKIPPED] >> test.py::test[blocks-coalesce_ints--Results] >> test.py::test[lineage-grouping_sets--Results] [GOOD] >> test.py::test[lineage-list_literal4-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_field_order_by-default.txt-Results] [SKIPPED] >> test.py::test[window-win_func_aggr_4func--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part_sorted--Results] >> test.py::test[key_filter-complex-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-complex-default.txt-Results] >> test.py::test[blocks-combine_all_min_filter_opt--Results] [GOOD] >> test.py::test[blocks-decimal_avg--ForceBlocks] >> test.py::test[distinct-distinct_union_all-default.txt-Results] [GOOD] >> test.py::test[dq-wrong_script--Results] [SKIPPED] >> test.py::test[expr-as_table_emptylist--Results] >> test.py::test[optimizers-unused_columns_group--Results] [GOOD] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-Results] >> test.py::test[select-two_selects_with_diff_fields-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Results] >> test.py::test[coalesce-coalesce--Results] [GOOD] >> test.py::test[column_group-groups-lookup-Results] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--Results] [GOOD] >> test.py::test[blocks-mul_uint64_opt2--Results] >> test.py::test[aggregate-group_by_full_path-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q47-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q66-default.txt-Results] >> test.py::test[join-anyjoin_common_nodata_keys--Results] [GOOD] >> test.py::test[join-bush_dis_in_in--ForceBlocks] >> test.py::test[column_group-groups-lookup-Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail5--Results] [SKIPPED] >> test.py::test[column_group-hint_unk_col_fail--Results] [SKIPPED] >> test.py::test[column_group-insert_diff_groups3_fail--Results] [SKIPPED] >> test.py::test[column_group-publish-perusage-Results] [SKIPPED] >> test.py::test[column_order-align_publish_native--Results] >> test.py::test[aggregate-group_by_hop_only_start--Results] [SKIPPED] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column--Results] >> test.py::test[expr-empty_iterator2--Results] [GOOD] >> test.py::test[expr-non_persistable_inner_select_fail--Results] >> test.py::test[udf-python_script--ForceBlocks] [GOOD] >> test.py::test[udf-python_script--Results] |84.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[lineage-select_field_order_by-default.txt-Results] [SKIPPED] >> test.py::test[pg-join_using_tables2-default.txt-Results] [GOOD] >> test.py::test[pg-select_table1-default.txt-Results] >> test.py::test[join-pullup_inner--ForceBlocks] >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [GOOD] >> 
test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] >> test.py::test[table_range-tablepath_with_non_existing--Results] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Results] [GOOD] >> test.py::test[select-use_cluster-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_gs_alt_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-Results] >> test.py::test[aggregate-group_by_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-Results] >> test.py::test[select-exists_with_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-exists_with_table-default.txt-Results] >> test.py::test[window-win_func_order_by_udf_empty_rank--Results] [GOOD] >> test.py::test[window-win_func_rank_by_all--Results] >> test.py::test[hor_join-row_num_per_sect--ForceBlocks] >> test.py::test[select-complex_filter_with_order-default.txt-Results] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-Results] >> test.py::test[key_filter-complex-default.txt-Results] [GOOD] >> test.py::test[like-ilike_clause-default.txt-ForceBlocks] >> test.py::test[insert-keepmeta--Results] [GOOD] >> test.py::test[insert-keepmeta_proto_fail--Results] >> test.py::test[join-inner_grouped_by_expr--Results] [GOOD] >> test.py::test[join-inner_grouped_by_expr-off-Results] >> test.py::test[aggr_factory-count_if-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-count_if-default.txt-Results] >> test.py::test[ql_filter-integer_select_other--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_select_other--Results] >> test.py::test[join-inner_grouped_by_expr-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_bug8533-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_empty_subq--Results] |84.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[table_range-tablepath_with_non_existing--Results] [GOOD] >> test.py::test[udf-python_script--Results] [GOOD] >> test.py::test[weak_field-few_source_different_columns--ForceBlocks] >> test.py::test[select-exists_with_table-default.txt-Results] [GOOD] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-ForceBlocks] >> test.py::test[expr-non_persistable_inner_select_fail--Results] [GOOD] >> test.py::test[file-parse_file_in_select_as_uint64--Results] >> test.py::test[blocks-not_opt--Results] [GOOD] >> test.py::test[blocks-pg_sort--Results] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Utf8-pk_types30-all_types30-index30---] [GOOD] >> test.py::test[result_types-data-default.txt-Results] [GOOD] >> test.py::test[sampling-direct_read--Results] >> test.py::test[expr-as_table_emptylist--Results] [GOOD] >> test.py::test[expr-evaluate_parse_inf_nan--Results] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_some_fail--Results] >> test.py::test[aggregate-group_by_session_extended_subset--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_extended_subset--Results] >> test.py::test[window-row_number_to_map_multiple-default.txt-Results] [GOOD] >> test.py::test[window-win_by_all_percentile_interval-default.txt-Results] >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] >> test.py::test[insert-keepmeta_proto_fail--Results] [GOOD] >> test.py::test[insert-keepmeta_with_read_udf_fail--Results] >> 
test.py::test[ql_filter-integer_select_other--Results] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-ForceBlocks] >> test.py::test[blocks-combine_hashed_count--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_count--Results] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--Results] [GOOD] >> test.py::test[aggregate-group_by_session_extended_subset--Results] >> test.py::test[blocks-mul_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-string_with--Results] >> test.py::test[pg-select_common_type_unionall--Results] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Results] >> test.py::test[blocks-coalesce_ints--Results] [GOOD] >> test.py::test[blocks-combine_all_decimal--Results] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Results] |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test.py::test[produce-reduce_multi_in_ref--Results] [GOOD] >> test.py::test[produce-reduce_with_python--Results] [SKIPPED] >> test.py::test[ql_filter-integer_single--Results] >> test.py::test[pg-tpcds-q66-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q71-default.txt-Results] >> test.py::test[pg-select_table1-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q01-default.txt-Results] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-Results] [GOOD] >> test.py::test[order_by-assume_with_transform_desc--Results] >> test.py::test[join-star_join_inners_premap--Results] [GOOD] >> test.py::test[join-star_join_mirror--Results] >> test.py::test[aggr_factory-count_if-default.txt-Results] [GOOD] >> test.py::test[order_by-assume_with_transform_desc--Results] [SKIPPED] >> test.py::test[aggr_factory-top_by-default.txt-ForceBlocks] >> test.py::test[window-win_func_first_last_with_part--Results] [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-Results] >> test.py::test[optimizers-field_subset_for_multiusage--Results] [GOOD] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-Results] >> test.py::test[expr-non_persistable_group_by_some_fail--Results] [GOOD] >> test.py::test[expr-non_persistable_order_by_fail--Results] >> test.py::test[select-use_cluster-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-use_cluster-default.txt-Results] >> test.py::test[join-pullup_inner--ForceBlocks] [GOOD] >> test.py::test[join-pullup_inner--Results] >> test.py::test[window-win_func_aggr_4func_no_part_sorted--Results] [GOOD] >> test.py::test[window-win_func_aggr_hist--Results] >> test.py::test[join-bush_dis_in_in--ForceBlocks] [GOOD] >> test.py::test[column_order-align_publish_native--Results] [GOOD] >> test.py::test[column_order-select_groupby_with_star-default.txt-Results] >> test.py::test[blocks-decimal_avg--ForceBlocks] [GOOD] >> test.py::test[join-bush_dis_in_in--Results] >> test.py::test[blocks-decimal_avg--Results] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Results] >> test.py::test[insert-keepmeta_with_read_udf_fail--Results] [GOOD] >> test.py::test[insert-override-from_sorted-Results] >> test.py::test[blocks-combine_hashed_count--Results] [GOOD] >> test.py::test[blocks-date_less_or_equal_scalar--ForceBlocks] >> 
test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] >> test.py::test[join-mergejoin_with_different_key_names_norename--Results] >> test.py::test[aggregate-group_by_session_extended_subset--Results] [GOOD] >> test.py::test[aggregate-percentile_and_avg_grouped--ForceBlocks] >> test.py::test[select-dot_name_subrequest-default.txt-Results] [GOOD] >> test.py::test[select-if-default.txt-Results] |84.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[order_by-assume_with_transform_desc--Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_empty_subq--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o2o--Results] >> test.py::test[like-ilike_clause-default.txt-ForceBlocks] [GOOD] >> test.py::test[like-ilike_clause-default.txt-Results] >> test.py::test[hor_join-row_num_per_sect--ForceBlocks] [GOOD] >> test.py::test[hor_join-row_num_per_sect--Results] >> test.py::test[window-win_func_rank_by_all--Results] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Results] >> test.py::test[file-parse_file_in_select_as_uint64--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_resource--Results] >> test.py::test[expr-non_persistable_order_by_fail--Results] [GOOD] >> test.py::test[file-parse_file_in_select_as_int--Results] >> test.py::test[select-use_cluster-default.txt-Results] [GOOD] >> test.py::test[tpch-q1-default.txt-ForceBlocks] >> test.py::test[weak_field-few_source_different_columns--ForceBlocks] [GOOD] >> test.py::test[weak_field-few_source_different_columns--Results] >> test.py::test[blocks-pg_sort--Results] [GOOD] >> test.py::test[column_group-insert_diff_groups1_fail--Results] [SKIPPED] >> test.py::test[column_order-insert_with_new_cols--Results] >> test.py::test[sampling-direct_read--Results] [GOOD] >> test.py::test[sampling-join_right_sample-default.txt-Results] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-ForceBlocks] [GOOD] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-Results] >> test.py::test[select-to_dict-default.txt-Results] >> test.py::test[pg-tpcds-q71-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q74-default.txt-Results] >> test.py::test[pg-tpcds-q01-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q20-default.txt-Results] >> test.py::test[join-pullup_inner--Results] [GOOD] >> test.py::test[join-three_equalities--ForceBlocks] >> test.py::test[ql_filter-integer_single--Results] [GOOD] >> test.py::test[sampling-bind_expr_subquery-default.txt-Results] >> test.py::test[like-ilike_clause-default.txt-Results] [GOOD] >> test.py::test[like-like_clause-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q05-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q29-default.txt-Results] >> test.py::test[blocks-decimal_avg--Results] [GOOD] >> test.py::test[blocks-json_document_type--ForceBlocks] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] >> test.py::test[sampling-mapjoin_right_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Results] >> test.py::test[action-eval_column--Results] [GOOD] >> test.py::test[action-eval_input_output_table_subquery--Results] >> 
test_sql_streaming.py::test[suites-GroupByHop-default.txt] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column--Results] [GOOD] >> test.py::test[aggregate-group_by_mul_ru_ru--Results] >> test.py::test[blocks-string_with--Results] [GOOD] >> test.py::test[blocks-top_sort_one_desc--Results] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt-Results] >> test.py::test[weak_field-few_source_different_columns--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--ForceBlocks] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-ForceBlocks] >> test.py::test[window-win_func_on_cloned_source-default.txt-Results] [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--Results] >> test.py::test[hor_join-row_num_per_sect--Results] [GOOD] >> test.py::test[join-filter_joined-off-ForceBlocks] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru--Results] >> test.py::test[insert-override-from_sorted-Results] [GOOD] >> test.py::test[insert-override-with_view-Results] [SKIPPED] >> test.py::test[insert_monotonic-several2-default.txt-Results] >> test.py::test[join-mapjoin_dup_key-off-ForceBlocks] >> test.py::test[select-if-default.txt-Results] [GOOD] >> test.py::test[select-one_labeled_column-default.txt-Results] >> test.py::test[join-bush_dis_in_in--Results] [GOOD] >> test.py::test[join-do_not_suppres_equijoin_input_sorts--ForceBlocks] [SKIPPED] >> test.py::test[join-do_not_suppres_equijoin_input_sorts--Results] [SKIPPED] >> test.py::test[join-full_equal_null-off-ForceBlocks] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Results] >> test.py::test[window-win_by_all_percentile_interval-default.txt-Results] [GOOD] >> test.py::test[window-win_func_into_udf--Results] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] >> test.py::test[aggregate-group_by_session_extended_subset--Results] [GOOD] >> test.py::test[aggregate-group_by_session_star--Results] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-topsort-default.txt-ForceBlocks] >> test.py::test[aggr_factory-top_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-top_by-default.txt-Results] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-Results] [GOOD] >> test.py::test[optimizers-test_lmap_opts--Results] [SKIPPED] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-Results] >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] >> test.py::test[flatten_by-flatten_with_resource--Results] [GOOD] >> test.py::test[hor_join-merge_multiouts_part--Results] [SKIPPED] >> test.py::test[hor_join-out_mem_limit-default.txt-Results] >> test.py::test[file-parse_file_in_select_as_int--Results] [GOOD] >> test.py::test[file-where_key_in_file_content--Results] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio--ForceBlocks] >> 
test.py::test[pg-tpcds-q20-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q28-default.txt-Results] >> test.py::test[pg-tpcds-q74-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q79-default.txt-Results] >> test.py::test[aggregate-percentile_and_avg_grouped--ForceBlocks] [GOOD] >> test.py::test[aggregate-percentile_and_avg_grouped--Results] >> test.py::test[join-lookupjoin_semi_1o2o--Results] [GOOD] >> test.py::test[join-lookupjoin_with_cache-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_left_null_column--Results] >> test.py::test[pg-tpcds-q29-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q58-default.txt-Results] >> test.py::test[blocks-combine_all_decimal--Results] [GOOD] >> test.py::test[blocks-combine_all_min_filter--Results] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint8-pk_types24-all_types24-index24---] [GOOD] >> test.py::test[sampling-bind_expr_subquery-default.txt-ForceBlocks] >> test.py::test[tpch-q1-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q1-default.txt-Results] >> test.py::test[blocks-json_document_type--ForceBlocks] [GOOD] >> test.py::test[blocks-json_document_type--Results] >> test.py::test[join-three_equalities--ForceBlocks] [GOOD] >> test.py::test[join-three_equalities--Results] >> test.py::test[like-like_clause-default.txt-ForceBlocks] [GOOD] >> test.py::test[like-like_clause-default.txt-Results] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] >> test.py::test[action-eval_input_output_table_subquery--Results] [GOOD] >> test.py::test[action-insert_after_eval_xlock--Results] >> test.py::test[sampling-bind_expr_subquery-default.txt-Results] [GOOD] >> test.py::test[sampling-insert--Results] >> test.py::test[column_order-insert_with_new_cols--Results] [GOOD] >> test.py::test[window-win_func_aggr_hist--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_opt--Results] >> test.py::test[distinct-distinct_columns-default.txt-Results] >> test.py::test[select-one_labeled_column-default.txt-Results] [GOOD] >> test.py::test[select-table_content_from_double_opt-default.txt-Results] >> test.py::test[aggr_factory-top_by-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-ForceBlocks] >> test.py::test[select-to_dict-default.txt-Results] [GOOD] >> test.py::test[select-uncorrelated_subqueries--Results] >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] >> test.py::test[sampling-join_right_sample-default.txt-Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--ForceBlocks] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] >> test.py::test[sampling-map-dynamic-Results] >> test.py::test[column_order-select_groupby_with_star-default.txt-Results] [GOOD] >> test.py::test[column_order-select_sample-default.txt-Results] >> test.py::test[blocks-top_sort_one_desc--Results] [GOOD] >> test.py::test[blocks-tuple_type--Results] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-Results] >> test.py::test[insert_monotonic-several2-default.txt-Results] [GOOD] >> test.py::test[join-compact_join--Results] >> test.py::test[join-filter_joined-off-ForceBlocks] [GOOD] >> 
test.py::test[join-filter_joined-off-Results] [SKIPPED] >> test.py::test[join-from_in_front_join-off-ForceBlocks] >> test.py::test[join-star_join_mirror--Results] [GOOD] >> test.py::test[join-star_join_semionly_premap--Results] >> test.py::test[aggregate-percentile_and_avg_grouped--Results] [GOOD] >> test.py::test[aggregate-percentiles_grouped_expr--ForceBlocks] >> test.py::test[join-mapjoin_dup_key-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_dup_key-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_any_no_join_reduce-off-ForceBlocks] >> test.py::test[like-like_clause-default.txt-Results] [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q79-default.txt-Results] [GOOD] >> test.py::test[join-full_equal_null-off-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q82-default.txt-Results] >> test.py::test[join-full_equal_null-off-Results] [SKIPPED] >> test.py::test[join-join_no_correlation_in_order_by--ForceBlocks] >> test.py::test[blocks-json_document_type--Results] [GOOD] >> test.py::test[blocks-pg_to_strings--ForceBlocks] >> test.py::test[file-where_key_in_file_content--Results] [GOOD] >> test.py::test[flatten_by-flatten_mode-default.txt-Results] >> test.py::test[sampling-topsort-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-topsort-default.txt-Results] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [GOOD] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-17715_concat_sort_desc--Results] [SKIPPED] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-Results] >> test.py::test[tpch-q1-default.txt-Results] [GOOD] >> test.py::test[tpch-q11-default.txt-ForceBlocks] >> test.py::test[window-win_func_into_udf--Results] [GOOD] >> test.py::test[pg-tpcds-q58-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-Results] |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Utf8-pk_types30-all_types30-index30---] [GOOD] >> test.py::test[join-three_equalities--Results] [GOOD] >> test.py::test[join-yql-19081--ForceBlocks] [SKIPPED] >> test.py::test[join-yql-19081--Results] [SKIPPED] >> test.py::test[json-json_exists/example--ForceBlocks] >> test.py::test[window-win_func_over_group_by_compl--Results] >> test.py::test[window-win_func_rank_by_opt_part--Results] [GOOD] >> test.py::test[ypath-complex-default.txt-Results] >> test.py::test[pg-tpcds-q28-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q37-default.txt-Results] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--ForceBlocks] >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] >> test.py::test[blocks-combine_all_min_filter--Results] [GOOD] >> test.py::test[blocks-combine_all_min_filter_opt--Results] |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Results] [GOOD] >> test.py::test[window-win_lead_in_mem-default.txt-Results] >> test.py::test[sampling-topsort-default.txt-Results] [GOOD] >> 
test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-Results] [GOOD] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_left_null_column--Results] [GOOD] >> test.py::test[join-mapjoin_left_null_column-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--Results] >> test.py::test[sampling-insert--Results] [GOOD] >> test.py::test[schema-def_values--Results] >> test.py::test[action-insert_after_eval_xlock--Results] [GOOD] >> test.py::test[action-nested_rewrite_io-default.txt-Results] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Results] [GOOD] >> test.py::test[stream_lookup_join-lookup_join-default.txt-Results] [SKIPPED] >> test.py::test[table_range-range_over_filter_udf--Results] >> test.py::test[sampling-bind_expr_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_expr_subquery-default.txt-Results] >> test.py::test[aggregate-group_by_cube_expr_trio--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio--Results] >> test.py::test[select-table_content_from_double_opt-default.txt-Results] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Results] >> test.py::test[aggregate-group_by_session_star--Results] [GOOD] >> test.py::test[aggregate-having_cast-default.txt-Results] >> test.py::test[blocks-tuple_type--Results] [GOOD] >> test.py::test[column_group-hint_append_fail--Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail--Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail4--Results] [SKIPPED] >> test.py::test[column_group-hint_dup_col_fail--Results] [SKIPPED] >> test.py::test[pg-tpcds-q82-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q92-default.txt-Results] >> test.py::test[join-from_in_front_join-off-ForceBlocks] [GOOD] >> test.py::test[join-from_in_front_join-off-Results] [SKIPPED] >> test.py::test[join-grace_join1-off-ForceBlocks] >> test.py::test[column_order-select_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-map-dynamic-Results] [GOOD] >> test.py::test[column_order-select_win_func-default.txt-Results] >> test.py::test[schema-fake_column-default.txt-Results] >> test.py::test[select-uncorrelated_subqueries--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Results] >> test.py::test[limit-empty_read_after_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-Results] >> test.py::test[flatten_by-flatten_mode-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr--Results] >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] |84.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[sampling-topsort-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_columns-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] >> test.py::test[sampling-bind_expr_subquery-default.txt-Results] [GOOD] >> test.py::test[sampling-sample-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_mul_gs_ru--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Results] >> test.py::test[pg-tpcds-q37-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q42-default.txt-Results] 
|84.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[column_group-hint_dup_col_fail--Results] [SKIPPED] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-Results] >> test.py::test[join-mergejoin_any_no_join_reduce-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_any_no_join_reduce-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort--ForceBlocks] >> test.py::test[join-join_no_correlation_in_order_by--ForceBlocks] [GOOD] >> test.py::test[ypath-complex-default.txt-Results] [GOOD] >> test.py::test[ypath-multi_key-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names_norename--Results] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order--Results] >> test.py::test[join-join_no_correlation_in_order_by--Results] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_simp--Results] >> test.py::test[limit-empty_read_after_limit-default.txt-Results] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-ForceBlocks] >> test.py::test[aggregate-percentiles_grouped_expr--ForceBlocks] [GOOD] >> test.py::test[aggregate-percentiles_grouped_expr--Results] >> test.py::test[join-star_join_semionly_premap--Results] [GOOD] >> test.py::test[join-yql-16011--Results] [SKIPPED] >> test.py::test[json-json_exists/example--Results] >> test.py::test[blocks-pg_to_strings--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_to_strings--Results] >> test.py::test[window-win_lead_in_mem-default.txt-Results] [GOOD] >> test.py::test[window-win_multiaggr_list-default.txt-Results] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal229-pk_types26-all_types26-index26---] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Results] >> test.py::test[table_range-range_over_filter_udf--Results] [GOOD] >> test.py::test[tpch-q5-default.txt-Results] >> test.py::test[join-compact_join--Results] [GOOD] >> test.py::test[join-equi_join_three_asterisk-off-Results] [SKIPPED] >> test.py::test[join-full_equal_null--Results] >> test.py::test[pg-tpcds-q67-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q73-default.txt-Results] >> test.py::test[schema-def_values--Results] [GOOD] >> test.py::test[schema-insert-row_spec-Results] >> test.py::test[hor_join-out_mem_limit-default.txt-Results] [GOOD] >> test.py::test[in-in_compact_distinct--Results] >> test.py::test[blocks-combine_all_min_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_hashed_max--Results] >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] >> test.py::test[pg-tpcds-q92-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q03-default.txt-Results] >> test.py::test[join-join_no_correlation_in_order_by--Results] [GOOD] >> test.py::test[join-join_no_correlation_in_order_by-off-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt-Results] >> test.py::test[action-nested_rewrite_io-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_evaluate-default.txt-Results] >> test.py::test[window-win_func_lead_lag_opt--Results] [GOOD] >> 
test.py::test[window-win_func_part_by_expr--Results] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Results] [GOOD] >> test.py::test[select-where_not_null--Results] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] >> test.py::test[tpch-q11-default.txt-ForceBlocks] [GOOD] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-ForceBlocks] [GOOD] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-Results] [SKIPPED] >> test.py::test[table_range-concat_sorted_with_key_diff--ForceBlocks] >> test.py::test[schema-fake_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio--Results] [GOOD] >> test.py::test[aggregate-group_by_full_path-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_esc_string--Results] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q42-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q46-default.txt-Results] >> test.py::test[join-grace_join1-off-ForceBlocks] [GOOD] >> test.py::test[join-grace_join1-off-Results] [SKIPPED] >> test.py::test[join-inner_trivial--ForceBlocks] >> test.py::test[blocks-pg_to_strings--Results] [GOOD] >> test.py::test[column_order-union_all_positional_columns_count_fail--ForceBlocks] |84.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[tpch-q11-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-drop_table--ForceBlocks] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-compare_tuple--ForceBlocks] |84.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[schema-fake_column-default.txt-Results] [GOOD] >> test.py::test[insert_monotonic-not_all_fail--ForceBlocks] >> test.py::test[ypath-multi_key-default.txt-Results] [GOOD] >> test.py::test[sampling-sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-sample-default.txt-Results] >> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] >> test.py::test[json-json_exists/example--ForceBlocks] [GOOD] >> test.py::test[json-json_exists/example--Results] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] >> test.py::test[join-mergejoin_narrows_output_sort--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_narrows_output_sort--Results] >> test.py::test[aggregate-having_cast-default.txt-Results] [GOOD] >> test.py::test[aggregate-table_funcs_group_by-default.txt-Results] >> test.py::test[pg-tpcds-q73-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-Results] |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint8-pk_types24-all_types24-index24---] [GOOD] >> test.py::test[column_order-union_all_positional_columns_count_fail--ForceBlocks] [GOOD] >> test.py::test[column_order-union_all_positional_columns_count_fail--Results] [GOOD] >> test.py::test[aggregate-percentiles_grouped_expr--Results] [GOOD] >> test.py::test[bigdate-implicit_cast_callable-default.txt-ForceBlocks] >> test.py::test[count-count_distinct_from_view_concat--ForceBlocks] |84.6%| [TM] {default-linux-x86_64, pic, 
relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[ypath-multi_key-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order--Results] [GOOD] >> test.py::test[join-mergejoin_with_table_range--Results] >> test.py::test[action-subquery_merge_evaluate-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt-Results] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--Results] [GOOD] >> test.py::test[schema-insert-row_spec-Results] [GOOD] >> test.py::test[schema-insert_sorted-schema-Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Results] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Results] >> test.py::test[insert_monotonic-not_all_fail--ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-not_all_fail--Results] [GOOD] >> test.py::test[insert_monotonic-truncate_and_append-default.txt-ForceBlocks] >> test.py::test[sampling-sample-default.txt-Results] [GOOD] >> test.py::test[sampling-sort-default.txt-ForceBlocks] >> test.py::test[select-where_not_null--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_base-default.txt-Results] >> test.py::test[window-win_multiaggr_list-default.txt-Results] [GOOD] >> test.py::test[window-win_over_few_partitions--Results] >> test.py::test[pg-tpcds-q46-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q55-default.txt-Results] >> test.py::test[join-join_no_correlation_in_order_by-off-ForceBlocks] [GOOD] >> test.py::test[join-join_no_correlation_in_order_by-off-Results] >> test.py::test[window-win_func_over_group_by_compl--Results] [GOOD] >> test.py::test[ytflow-select--Results] [SKIPPED] >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] >> test.py::test[join-join_no_correlation_in_order_by-off-Results] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_null--ForceBlocks] >> test.py::test[produce-reduce_with_python_input_stream--ForceBlocks] >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [GOOD] >> test.py::test[join-full_equal_null--Results] [GOOD] >> test.py::test[join-full_trivial-off-Results] [SKIPPED] >> test.py::test[join-grace_join1-off-Results] [SKIPPED] >> test.py::test[join-inner_all--Results] >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_sorted_with_key_diff--Results] >> test.py::test[table_range-concat_sorted_with_key_diff--ForceBlocks] [GOOD] >> test.py::test[table_range-concat_sorted_with_key_diff--Results] >> test.py::test[aggregate-group_by_full_path-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_full_path-default.txt-Results] >> test.py::test[pg-drop_table--ForceBlocks] [GOOD] >> test.py::test[pg-drop_table--Results] >> test.py::test[blocks-combine_hashed_max--Results] [GOOD] >> test.py::test[blocks-date_less_or_equal--Results] >> test.py::test[join-inner_trivial--ForceBlocks] [GOOD] >> test.py::test[join-inner_trivial--Results] >> test.py::test[in-in_compact_distinct--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr--Results] [GOOD] >> test.py::test[hor_join-fuse_multi_outs1--Results] [SKIPPED] >> test.py::test[hor_join-group_yamr--Results] >> test.py::test[tpch-q5-default.txt-Results] [GOOD] >> 
test.py::test[blocks-date_less_or_equal_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_less_or_equal_scalar--Results] >> test.py::test[join-mergejoin_narrows_output_sort--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort-off-ForceBlocks] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] [GOOD] >> test.py::test[epochs-read_modified--Results] >> test.py::test[json-json_exists/example--Results] [GOOD] >> test.py::test[key_filter-is_null--ForceBlocks] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Results] [GOOD] >> test.py::test[lineage-join_as_struct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-join_as_struct-default.txt-Results] [SKIPPED] >> test.py::test[pg-tpcds-q95-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q06-default.txt-Results] >> test.py::test[column_order-select_win_func-default.txt-Results] [GOOD] >> test.py::test[column_order-union_all_positional_unordered_fail--Results] >> test.py::test[json-json_exists/example--Results] [GOOD] |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [GOOD] |84.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[ytflow-select--Results] [SKIPPED] |84.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[in-in_compact_distinct--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Results] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Results] >> test.py::test[aggregate-group_by_ru_with_window_func--Results] >> test.py::test[pg-drop_table--Results] [GOOD] >> test.py::test[pg-select_from_columns_qstar-default.txt-ForceBlocks] >> test.py::test[aggregate-compare_tuple--ForceBlocks] [GOOD] >> test.py::test[aggregate-compare_tuple--Results] >> test.py::test[table_range-concat_sorted_with_key_diff--Results] [GOOD] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-ForceBlocks] |84.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[tpch-q5-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_mul_ru_ru--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref--Results] >> test.py::test[produce-reduce_with_python_input_stream--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_with_python_input_stream--Results] [GOOD] >> test.py::test[produce-reduce_with_python_row--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_row--Results] [SKIPPED] >> test.py::test[sampling-insert--ForceBlocks] >> test.py::test[aggregate-group_by_full_path-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--ForceBlocks] |84.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[lineage-join_as_struct-default.txt-Results] [SKIPPED] >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] >> test.py::test[aggregate-group_by_gs_simp--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_with_rollup--Results] |84.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} 
ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[json-json_exists/example--Results] [GOOD] >> test.py::test[aggregate-table_funcs_group_by-default.txt-Results] [GOOD] >> test.py::test[ansi_idents-join_using-default.txt-Results] >> test.py::test[action-action_eval_cluster_and_table-default.txt-ForceBlocks] >> test.py::test[action-subquery_merge_nested_world-default.txt-Results] [GOOD] >> test.py::test[agg_apply-avg_const_interval--Results] >> test.py::test[pg-tpch-q03-default.txt-Results] [GOOD] >> test.py::test[window-win_func_part_by_expr--Results] [GOOD] >> test.py::test[join-inner_trivial--Results] [GOOD] >> test.py::test[window-win_func_rank_by_opt_all--Results] >> test.py::test[join-inner_trivial-off-ForceBlocks] >> test.py::test[pg-tpcds-q55-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q63-default.txt-Results] >> test.py::test[count-count_distinct_from_view_concat--ForceBlocks] [GOOD] >> test.py::test[count-count_distinct_from_view_concat--Results] >> test.py::test[schema-insert_sorted-schema-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort-Results] >> test.py::test[column_order-union_all_positional_unordered_fail--Results] [GOOD] >> test.py::test[count-count--Results] >> test.py::test[simple_columns-simple_columns_base-default.txt-Results] [GOOD] >> test.py::test[type_v3-append_diff_layout1--Results] >> test.py::test[join-mergejoin_with_table_range--Results] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group--Results] >> test.py::test[sampling-sort-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-sort-default.txt-Results] >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt-ForceBlocks] >> test.py::test[insert_monotonic-truncate_and_append-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-truncate_and_append-default.txt-Results] >> test.py::test[hor_join-group_yamr--Results] [GOOD] >> test.py::test[hor_join-less_outs--Results] [SKIPPED] >> test.py::test[insert-append_missing_null-default.txt-Results] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Results] [GOOD] >> test.py::test[window-current/aggregations--ForceBlocks] >> test.py::test[table_range-concat_sorted_with_key_diff--Results] [GOOD] >> test.py::test[table_range-merge_non_strict--Results] >> test.py::test[bigdate-implicit_cast_callable-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-implicit_cast_callable-default.txt-Results] >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] |84.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[pg-tpch-q03-default.txt-Results] [GOOD] >> test.py::test[count-count_distinct_from_view_concat--Results] [GOOD] >> test.py::test[aggregate-compare_tuple--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-ForceBlocks] >> test.py::test[datetime-date_tz_table_sort_desc--ForceBlocks] >> test.py::test[aggregate-ensure_count-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-5833-table_content--Results] >> test.py::test[join-left_join_right_pushdown_null--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_null--Results] >> test.py::test[epochs-read_modified--Results] [GOOD] >> test.py::test[join-inner_all--Results] [GOOD] >> 
test.py::test[join-join_and_distinct_key--Results] >> test.py::test[sampling-sort-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_sort-default.txt-ForceBlocks] >> test.py::test[pg-select_from_columns_qstar-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_from_columns_qstar-default.txt-Results] >> test.py::test[optimizers-length_over_merge--ForceBlocks] >> test.py::test[insert_monotonic-truncate_and_append-default.txt-Results] [GOOD] >> test.py::test[join-cbo_7tables--ForceBlocks] [SKIPPED] >> test.py::test[join-cbo_7tables--Results] [SKIPPED] >> test.py::test[join-equi_join_by_expr--ForceBlocks] >> test.py::test[key_filter-is_null--ForceBlocks] [GOOD] >> test.py::test[key_filter-is_null--Results] >> test.py::test[schema-select_all-row_spec_diff_sort-Results] [GOOD] >> test.py::test[schema-select_all-schema-Results] >> test.py::test[pg-tpch-q06-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q11-default.txt-Results] |84.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[epochs-read_modified--Results] [GOOD] >> test.py::test[pg-tpcds-q63-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q83-default.txt-Results] |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal229-pk_types26-all_types26-index26---] [GOOD] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-Results] >> test.py::test[type_v3-append_diff_layout1--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-protofield-Results] >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] >> test.py::test[pg-select_from_columns_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-select_unionall_self-default.txt-ForceBlocks] >> test.py::test[blocks-date_less_or_equal_scalar--Results] [GOOD] >> test.py::test[action-action_eval_cluster_and_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-action_eval_cluster_and_table-default.txt-Results] >> test.py::test[blocks-date_not_equals--ForceBlocks] >> test.py::test[bigdate-implicit_cast_callable-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_int_cast-default.txt-ForceBlocks] >> test.py::test[ansi_idents-join_using-default.txt-Results] [GOOD] >> test.py::test[binding-table_regexp_strict_binding--Results] >> test.py::test[join-inner_trivial-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_trivial-off-Results] >> test.py::test[join-inner_trivial-off-Results] [SKIPPED] >> test.py::test[join-join_without_correlation_names--ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_very_complex_type-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_big_primary_unique-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align2-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_left_null_column-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort--Results] >> test.py::test[key_filter-is_null--Results] [GOOD] >> test.py::test[like-like_clause_no_pattern-default.txt-ForceBlocks] >> test.py::test[insert-append_missing_null-default.txt-Results] [GOOD] >> 
test.py::test[insert-literals_to_string-default.txt-Results] >> test.py::test[agg_apply-avg_const_interval--Results] [GOOD] >> test.py::test[sampling-insert--ForceBlocks] [GOOD] >> test.py::test[sampling-insert--Results] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-Results] >> test.py::test[order_by-order_by_tablepath_column--ForceBlocks] >> test.py::test[join-lookupjoin_bug7646_csee--ForceBlocks] >> test.py::test[action-action_eval_cluster_and_table-default.txt-Results] [GOOD] >> test.py::test[action-eval_for_over_subquery-default.txt-ForceBlocks] >> test.py::test[window-win_func_rank_by_opt_all--Results] [GOOD] >> test.py::test[window-yql-14479-default.txt-Results] >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt-Results] >> test.py::test[window-win_over_few_partitions--Results] [GOOD] >> test.py::test[window-win_over_few_partitions_other--Results] >> test.py::test[key_filter-range_union--ForceBlocks] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_yson--ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--Results] >> test.py::test[table_range-merge_non_strict--Results] [GOOD] >> test.py::test[tpch-q17-default.txt-Results] >> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] >> test.py::test[join-left_join_right_pushdown_null--Results] [GOOD] >> test.py::test[union_all-union_all_multiple-default.txt-Results] >> test.py::test[type_v3-append_struct-default.txt-Results] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-Results] >> test.py::test[blocks-interval_add_interval_scalar--ForceBlocks] >> test.py::test[aggregate-ensure_count-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-ensure_count-default.txt-Results] >> test.py::test[schema-select_all-schema-Results] [GOOD] >> test.py::test[schema-select_all_inferschema2--Results] >> test.py::test[sampling-insert--Results] [GOOD] >> test.py::test[schema-insert_sorted-schema-ForceBlocks] >> test.py::test[sampling-subquery_sort-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-subquery_sort-default.txt-Results] >> test.py::test[pg-tpcds-q83-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_table_sort_desc--ForceBlocks] [GOOD] >> test.py::test[datetime-date_tz_table_sort_desc--Results] >> test.py::test[pg-tpcds-q86-default.txt-Results] >> test.py::test[aggregate-group_by_rollup_column_ref--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt-Results] [GOOD] >> test.py::test[table_range-range_over_desc--ForceBlocks] >> test.py::test[aggregate-group_by_ru_with_window_func--Results] [GOOD] >> test.py::test[aggregate-group_by_session_compact--Results] >> test.py::test[type_v3-ignore_v3_hint-protofield-Results] [GOOD] >> test.py::test[union-union_multiin--Results] >> test.py::test[join-equi_join_by_expr--ForceBlocks] [GOOD] >> test.py::test[join-equi_join_by_expr--Results] >> test.py::test[optimizers-length_over_merge--ForceBlocks] [GOOD] >> test.py::test[optimizers-length_over_merge--Results] >> test.py::test[optimizers-yql-5833-table_content--Results] [GOOD] >> 
test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--Results] |84.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[join-left_join_right_pushdown_null--Results] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group--Results] [GOOD] >> test.py::test[join-premap_common_left_cross-off-Results] [SKIPPED] >> test.py::test[join-premap_common_semi-off-Results] [SKIPPED] >> test.py::test[join-premap_map_semi-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_extrasort1--Results] >> test.py::test[window-current/aggregations--ForceBlocks] [GOOD] >> test.py::test[aggregate-ensure_count-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort-off-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_lookup--ForceBlocks] >> test.py::test[window-current/aggregations--Results] >> test.py::test[pg-select_unionall_self-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_unionall_self-default.txt-Results] |84.7%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part7/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[join-mergejoin_saves_output_sort-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_unmatched--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_unmatched--Results] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_nomatch--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_nomatch--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nested-off-ForceBlocks] >> test.py::test[binding-table_regexp_strict_binding--Results] [GOOD] >> test.py::test[blocks-add_uint16--Results] >> test.py::test[sampling-subquery_sort-default.txt-Results] [GOOD] >> test.py::test[sampling-system_sampling--ForceBlocks] [SKIPPED] >> test.py::test[sampling-system_sampling--Results] [SKIPPED] >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [GOOD] >> test.py::test[insert-literals_to_string-default.txt-Results] [GOOD] >> test.py::test[insert-part_sortness--Results] >> test.py::test[schema-copy-other-ForceBlocks] >> test.py::test[join-anyjoin_common_nodup--Results] |84.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[aggregate-group_by_rollup_column_ref--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_with_rollup--ForceBlocks] >> test.py::test[datetime-date_tz_table_sort_desc--Results] [GOOD] >> test.py::test[dq-truncate_local-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_tablepath_column--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tablepath_column--Results] >> test.py::test[optimizers-length_over_merge--Results] [GOOD] >> test.py::test[optimizers-nonselected_direct_row--ForceBlocks] >> test.py::test[aggregate-group_by_gs_with_rollup--Results] [GOOD] >> test.py::test[aggregate-group_by_hop--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_bad_interval--Results] >> test.py::test[join-join_without_correlation_names--ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_names--Results] >> test.py::test[pg-select_unionall_self-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q10-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_hop_bad_interval--Results] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_duo_opt--Results] >> 
test.py::test[like-like_clause_no_pattern-default.txt-ForceBlocks] [GOOD] >> test.py::test[like-like_clause_no_pattern-default.txt-Results] >> test.py::test[sampling-topsort-default.txt-Results] |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [GOOD] >> test.py::test[action-eval_for_over_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_for_over_subquery-default.txt-Results] >> test.py::test[join-equi_join_by_expr--Results] [GOOD] >> test.py::test[join-inner_all_right-off-ForceBlocks] >> test.py::test[weak_field-weak_field_esc_yson--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_esc_yson--Results] >> test.py::test[union_all-union_all_multiple-default.txt-Results] [GOOD] >> test.py::test[view-all_from_view--ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--Results] [GOOD] >> test.py::test[aggregate-no_compact_distinct--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-no_compact_distinct--Results] [SKIPPED] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-ForceBlocks] >> test.py::test[blocks-date_less_or_equal--Results] [GOOD] >> test.py::test[blocks-decimal_avg--Results] >> test.py::test[type_v3-append_struct-default.txt-Results] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--Results] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-3.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-4.test] >> test.py::test[order_by-SortByOneField--Results] >> test.py::test[order_by-order_by_tablepath_column--Results] [GOOD] >> test.py::test[join-join_and_distinct_key--Results] [GOOD] >> test.py::test[order_by-order_by_tablerow_column--ForceBlocks] >> test.py::test[join-join_semi_correlation_in_order_by--Results] >> test.py::test[like-like_clause_no_pattern-default.txt-Results] [GOOD] >> test.py::test[limit-limit_offset-default.txt-ForceBlocks] >> test.py::test[blocks-interval_add_interval_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_add_interval_scalar--Results] |84.8%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} >> test.py::test[pg-tpcds-q86-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q89-default.txt-Results] >> test.py::test[bigdate-table_int_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_int_cast-default.txt-Results] >> test.py::test[schema-select_all_inferschema2--Results] [GOOD] >> test.py::test[schema-select_all_inferschema_op--Results] >> test.py::test[join-lookupjoin_bug7646_csee--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_bug7646_csee--Results] >> test.py::test[window-current/aggregations--Results] [GOOD] >> test.py::test[window-current/ansi_current_mixed--ForceBlocks] |84.8%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test.py::test[blocks-add_uint16--Results] [GOOD] >> test.py::test[blocks-add_uint32--Results] >> test.py::test[join-join_without_correlation_names--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_no_opt--ForceBlocks] >> test.py::test[action-eval_for_over_subquery-default.txt-Results] [GOOD] >> test.py::test[action-eval_skip_take--ForceBlocks] >> test.py::test[schema-insert_sorted-schema-ForceBlocks] [GOOD] >> test.py::test[schema-insert_sorted-schema-Results] >> test.py::test[table_range-range_over_desc--ForceBlocks] [GOOD] >> test.py::test[table_range-range_over_desc--Results] >> test.py::test[insert-part_sortness--Results] [GOOD] >> test.py::test[insert-trivial_literals_multirow-default.txt-Results] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_yson--Results] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_expr_lookup--ForceBlocks] [GOOD] >> test.py::test[aggr_factory-bitor-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names_nested-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nested-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_norename--ForceBlocks] >> test.py::test[aggregate-group_by_expr_lookup--Results] >> test.py::test[dq-truncate_local-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-truncate_local-default.txt-Results] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_some_fail--ForceBlocks] >> test.py::test[blocks-interval_add_interval_scalar--Results] [GOOD] >> test.py::test[blocks-pg_tofrom--ForceBlocks] >> test.py::test[tpch-q17-default.txt-Results] [GOOD] >> test.py::test[tpch-q18-default.txt-Results] >> test.py::test[window-yql-14479-default.txt-Results] [GOOD] >> test.py::test[window-yql-14738-default.txt-Results] >> test.py::test[schema-copy-other-ForceBlocks] [GOOD] >> test.py::test[schema-copy-other-Results] >> test.py::test[join-premap_merge_extrasort1--Results] [GOOD] >> test.py::test[join-pullup_extend--Results] >> test.py::test[pg-tpcds-q10-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q10-default.txt-Results] >> test.py::test[schema-insert_sorted-schema-Results] [GOOD] >> test.py::test[schema-select_fields_inferschema--ForceBlocks] >> test.py::test[union-union_multiin--Results] [GOOD] >> test.py::test[union-union_trivial-default.txt-Results] >> test.py::test[aggregate-group_by_session_compact--Results] [GOOD] >> test.py::test[aggregate-group_by_session_extended--Results] >> test.py::test[join-lookupjoin_bug7646_csee--Results] [GOOD] >> test.py::test[join-lookupjoin_bug8533--ForceBlocks] |84.8%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part7/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[optimizers-nonselected_direct_row--ForceBlocks] [GOOD] >> test.py::test[optimizers-nonselected_direct_row--Results] >> test.py::test[type_v3-decimal_yt_llvm--Results] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--Results] >> test.py::test[table_range-range_over_desc--Results] [GOOD] >> test.py::test[tpch-q18-default.txt-ForceBlocks] >> test.py::test[sampling-topsort-default.txt-Results] [GOOD] >> test.py::test[schema-append_to_desc_with_remap--Results] >> test.py::test[view-all_from_view--ForceBlocks] [GOOD] >> test.py::test[view-all_from_view--Results] >> test.py::test[window-win_over_few_partitions_other--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_lookup--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt-ForceBlocks] >> test.py::test[blocks-date_not_equals--ForceBlocks] [GOOD] >> test.py::test[blocks-date_not_equals--Results] >> test.py::test[order_by-SortByOneField--Results] [GOOD] >> test.py::test[order_by-literal_desc--Results] [SKIPPED] >> test.py::test[order_by-literal_with_assume_desc--Results] [SKIPPED] >> test.py::test[order_by-order_by_dot_column-default.txt-Results] >> test.py::test[schema-copy-other-Results] [GOOD] >> test.py::test[schema-copy-yamred_dsv_raw-ForceBlocks] >> test.py::test[pg-tpcds-q59-default.txt-Results] >> test.py::test[join-inner_all_right-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_all_right-off-Results] [SKIPPED] >> test.py::test[join-inner_on_key_only--ForceBlocks] >> test.py::test[count-count--Results] [GOOD] >> test.py::test[pg-tpch-q11-default.txt-Results] [GOOD] >> test.py::test[count-count_all_grouped--Results] >> test.py::test[pg-tpch-q16-default.txt-Results] >> test.py::test[pg-tpcds-q89-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-lambda--Results] >> test.py::test[expr-non_persistable_group_by_some_fail--ForceBlocks] [GOOD] >> test.py::test[expr-non_persistable_group_by_some_fail--Results] [GOOD] >> test.py::test[file-parse_file_in_select_as_str--ForceBlocks] >> test.py::test[order_by-order_by_tablerow_column--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tablerow_column--Results] >> test.py::test[pg-tpcds-q10-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q29-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_gs_with_rollup--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_with_rollup--Results] >> test.py::test[blocks-add_uint32--Results] [GOOD] >> test.py::test[blocks-block_input_mapreduce--Results] [SKIPPED] >> test.py::test[blocks-block_input_various_types_2--Results] |84.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[window-win_over_few_partitions_other--Results] [GOOD] >> test.py::test[bigdate-table_int_cast-default.txt-Results] [GOOD] >> test.py::test[blocks-bitcast_block--ForceBlocks] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-ForceBlocks] [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Results] >> test.py::test[schema-select_all_inferschema_op--Results] [GOOD] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Results] >> test.py::test[blocks-block_input_various_types_2--Results] [SKIPPED] >> test.py::test[blocks-block_input_various_types_2-v3-Results] [SKIPPED] >> test.py::test[blocks-block_output_various_types--Results] [SKIPPED] >> test.py::test[blocks-combine_all_avg_filter--Results] >> test.py::test[limit-limit_offset-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[limit-limit_offset-default.txt-Results] >> test.py::test[join-join_semi_correlation_in_order_by--Results] [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-Results] [SKIPPED] >> test.py::test[insert-trivial_literals_multirow-default.txt-Results] [GOOD] >> test.py::test[insert-yql-13083--Results] >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--Results] [GOOD] >> test.py::test[order_by-native_desc_sort_calc--Results] [SKIPPED] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-Results] >> test.py::test[optimizers-nonselected_direct_row--Results] [GOOD] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-ForceBlocks] >> test.py::test[view-all_from_view--Results] [GOOD] >> test.py::test[view-file_inner_library--ForceBlocks] >> test.py::test[bigdate-tz_table_rw--Results] >> test.py::test[join-left_join_right_pushdown_no_opt--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_no_opt--Results] >> test.py::test[key_filter-range_union--ForceBlocks] [GOOD] >> test.py::test[key_filter-range_union--Results] >> test.py::test[action-eval_skip_take--ForceBlocks] [GOOD] >> test.py::test[action-eval_skip_take--Results] >> test.py::test[blocks-decimal_avg--Results] [GOOD] >> test.py::test[blocks-decimal_unary--Results] >> test.py::test[order_by-SortByTwoFields--ForceBlocks] >> test.py::test[order_by-order_by_tablerow_column--Results] [GOOD] >> test.py::test[params-complex_yson--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_duo_opt--Results] [GOOD] |84.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-Results] [SKIPPED] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-ForceBlocks] >> test.py::test[type_v3-decimal_yt_nollvm--Results] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs--Results] >> test.py::test[limit-limit_offset-default.txt-Results] [GOOD] >> test.py::test[lineage-flatten_list_nested_lambda--ForceBlocks] [SKIPPED] >> test.py::test[lineage-flatten_list_nested_lambda--Results] [SKIPPED] >> test.py::test[lineage-select_field_order_by-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_field_order_by-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_field_rename-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_field_rename-default.txt-Results] [SKIPPED] >> test.py::test[blocks-pg_tofrom--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_tofrom--Results] >> test.py::test[schema-select_fields_inferschema--ForceBlocks] [GOOD] >> test.py::test[schema-select_fields_inferschema--Results] >> test.py::test[pg-tpcds-q59-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q64-default.txt-Results] >> test.py::test[join-lookupjoin_bug8533--ForceBlocks] [GOOD] >> test.py::test[schema-append_to_desc_with_remap--Results] [GOOD] >> test.py::test[schema-insert-schema-Results] >> TExportToS3WithRebootsTests::CancelOnSingleShardTableWithChangefeed [GOOD] >> test.py::test[join-lookupjoin_bug8533--Results] >> test.py::test[join-mergejoin_with_different_key_names_norename--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename--Results] >> test.py::test[action-eval_skip_take--Results] [GOOD] >> test.py::test[action-eval_unresolved_type_arg-default.txt-ForceBlocks] |84.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} 
ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[aggregate-group_by_rollup_duo_opt--Results] [GOOD] |84.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[lineage-select_field_rename-default.txt-Results] [SKIPPED] >> test.py::test[schema-copy-yamred_dsv_raw-ForceBlocks] [GOOD] >> test.py::test[schema-copy-yamred_dsv_raw-Results] >> test.py::test[join-pullup_extend--Results] [GOOD] >> test.py::test[join-pullup_inner--Results] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Results] [GOOD] >> test.py::test[select-corr_name_in_select_seq-default.txt-Results] >> test.py::test[schema-select_fields_inferschema--Results] [GOOD] >> test.py::test[select-append_to_value_1000--ForceBlocks] [SKIPPED] >> test.py::test[select-append_to_value_1000--Results] >> test.py::test[join-inner_on_key_only--ForceBlocks] [GOOD] >> test.py::test[tpch-q18-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q18-default.txt-Results] >> test.py::test[aggregate-group_by_gs_with_rollup--Results] [GOOD] >> test.py::test[aggregate-group_by_hop--ForceBlocks] >> test.py::test[file-parse_file_in_select_as_str--ForceBlocks] [GOOD] >> test.py::test[file-parse_file_in_select_as_str--Results] >> test.py::test[select-append_to_value_1000--Results] [SKIPPED] >> test.py::test[select-opt_list_access-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_dot_column-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_tuple-default.txt-Results] >> test.py::test[join-inner_on_key_only--Results] >> test.py::test[pg_catalog-lambda--Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull--Results] [SKIPPED] >> test.py::test[produce-fuse_reduces_with_presort--Results] >> test.py::test[key_filter-range_union--Results] [GOOD] >> test.py::test[key_filter-ranges--ForceBlocks] >> test.py::test[blocks-pg_tofrom--Results] [GOOD] >> test.py::test[blocks-tuple_nth--ForceBlocks] >> test.py::test[join-left_join_right_pushdown_no_opt--Results] [GOOD] >> test.py::test[join-lookupjoin_with_cache-off-ForceBlocks] >> test.py::test[aggr_factory-bitor-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bottom_by-default.txt-Results] >> test.py::test[blocks-bitcast_block--ForceBlocks] [GOOD] >> test.py::test[blocks-bitcast_block--Results] >> test.py::test[union-union_trivial-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q29-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q29-default.txt-Results] >> test.py::test[view-file_inner_library--ForceBlocks] [GOOD] >> test.py::test[view-file_inner_library--Results] >> test.py::test[schema-copy-yamred_dsv_raw-Results] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-ForceBlocks] >> test.py::test[bigdate-tz_table_rw--Results] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt-Results] >> test.py::test[join-lookupjoin_bug8533--Results] [GOOD] >> test.py::test[join-lookupjoin_bug8533-off-ForceBlocks] >> test.py::test[blocks-decimal_unary--Results] [GOOD] >> test.py::test[blocks-distinct_mixed_keys--Results] >> test.py::test[count-count_all_grouped--Results] [GOOD] >> test.py::test[distinct-distinct_and_join--Results] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-Results] [GOOD] >> test.py::test[order_by-order_with_null-default.txt-Results] >> test.py::test[window-distinct_over_window_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt-Results] >> 
test.py::test[aggregate-group_by_session_extended--Results] [GOOD] >> test.py::test[blocks-combine_all_avg_filter--Results] [GOOD] >> test.py::test[blocks-compare_dates_floats_bools--Results] [SKIPPED] >> test.py::test[blocks-decimal_op_decimal--Results] >> test.py::test[blocks-date_not_equals--Results] [GOOD] >> test.py::test[blocks-decimal_multiplicative_ops--ForceBlocks] >> test.py::test[params-complex_yson--ForceBlocks] [GOOD] >> test.py::test[params-complex_yson--Results] >> test.py::test[order_by-SortByTwoFields--ForceBlocks] [GOOD] >> test.py::test[order_by-SortByTwoFields--Results] >> test.py::test[aggregate-no_compact_distinct--Results] [SKIPPED] >> test.py::test[aggregate-percentile_and_variance--Results] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt-Results] >> test.py::test[file-parse_file_in_select_as_str--Results] [GOOD] >> test.py::test[window-current/ansi_current_mixed--ForceBlocks] [GOOD] >> test.py::test[window-current/ansi_current_mixed--Results] >> test.py::test[insert-yql-13083--Results] [GOOD] >> test.py::test[join-bush_dis_in_in_in-off-Results] [SKIPPED] >> test.py::test[join-bush_in_in-off-Results] [SKIPPED] >> test.py::test[join-bush_in_in_in--Results] >> test.py::test[blocks-bitcast_block--Results] [GOOD] >> test.py::test[blocks-block_input_various_types--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_various_types--Results] [SKIPPED] >> test.py::test[blocks-block_input_various_types_2-v3-ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_various_types_2-v3-Results] [SKIPPED] >> test.py::test[view-file_inner_library--Results] [GOOD] >> test.py::test[weak_field-weak_field_infer_scheme--ForceBlocks] |84.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[union-union_trivial-default.txt-Results] [GOOD] >> test.py::test[join-inner_on_key_only--Results] [GOOD] >> test.py::test[join-left_cast_to_string-off-ForceBlocks] >> test.py::test[pg-tpcds-q29-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q61-default.txt-ForceBlocks] >> test.py::test[udf-named_args_for_script_with_posargs--Results] [GOOD] >> test.py::test[union_all-infer_3-default.txt-Results] |84.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[file-parse_file_in_select_as_str--Results] [GOOD] |84.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[blocks-block_input_various_types_2-v3-Results] [SKIPPED] >> test.py::test[tpch-q18-default.txt-Results] [GOOD] >> test.py::test[type_v3-float--Results] >> test.py::test[params-complex_yson--Results] [GOOD] >> test.py::test[pg-tpcds-q22-default.txt-ForceBlocks] >> test.py::test[order_by-SortByTwoFields--Results] [GOOD] >> test.py::test[order_by-assume_over_input--ForceBlocks] >> test.py::test[schema-insert-schema-Results] [GOOD] >> test.py::test[select-braces-default.txt-Results] >> test.py::test[action-action_nested_query-default.txt-Results] >> test.py::test[select-corr_name_in_select_seq-default.txt-Results] [GOOD] >> test.py::test[select-opt_list_access-default.txt-Results] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Results] >> test.py::test[pg-tpcds-q64-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q84-default.txt-Results] >> 
test.py::test[action-eval_unresolved_type_arg-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_unresolved_type_arg-default.txt-Results] >> test.py::test[join-mergejoin_saves_output_sort--Results] [GOOD] >> test.py::test[tpch-q18-default.txt-Results] [GOOD] >> test.py::test[tpch-q21-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_tuple-default.txt-Results] [GOOD] >> test.py::test[pg-doubles_search_path-default.txt-Results] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_small_primary--Results] >> test.py::test[window-distinct_over_window_struct-default.txt-Results] [GOOD] >> test.py::test[window-win_extract_members-default.txt-ForceBlocks] >> test.py::test[binding-table_filter_binding-default.txt-Results] [GOOD] >> test.py::test[binding-table_from_binding-default.txt-Results] >> test.py::test[aggregate-group_by_hop--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop--Results] >> test.py::test[join-mergejoin_with_different_key_names_norename--Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename-off-ForceBlocks] >> test.py::test[aggregate-group_by_hop--Results] [SKIPPED] >> test.py::test[select-opt_list_access-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-opt_list_access-default.txt-Results] >> test.py::test[key_filter-ranges--ForceBlocks] [GOOD] >> test.py::test[key_filter-ranges--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelOnSingleShardTableWithChangefeed [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T03:04:20.619559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:04:20.619581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.619586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:04:20.619590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:04:20.619594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:04:20.619597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:04:20.619605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:04:20.619617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:04:20.619694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:04:20.619752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:04:20.632258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T03:04:20.632276Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:04:20.632340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.633677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:04:20.633706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:04:20.633725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:04:20.634641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:04:20.634679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:04:20.634761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.634795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:04:20.635180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.635377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.635386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.635430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:04:20.635437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.635444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:04:20.635467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] 
sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T03:04:20.636941Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T03:04:20.654907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:04:20.654969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.655024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:04:20.655087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:04:20.655098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.656404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.656434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:04:20.656483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.656492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:04:20.656496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:04:20.656501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:04:20.656960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.656976Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:04:20.656982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:04:20.658484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.658500Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.658505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.658513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:04:20.659049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:04:20.662977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:04:20.663015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:04:20.663181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:04:20.663205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:04:20.663212Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.663268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:04:20.663276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:04:20.663306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:04:20.663317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:04:20.664771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:04:20.664782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:04:20.664830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:04:20.664834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:04:20.664841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:04:20.664846Z node 1 :FLAT_TX_SCHEMESHARD I ... 
AT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-05T03:08:34.196398Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:08:34.196515Z node 441 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:08:34.196522Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:08:34.196525Z node 441 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:08:34.196527Z node 441 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 7 2025-05-05T03:08:34.196529Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-05T03:08:34.196535Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:08:34.197126Z node 441 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:08:34.197157Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:08:34.197161Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:08:34.197166Z node 441 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:08:34.197184Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T03:08:34.197201Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000009 2025-05-05T03:08:34.197266Z node 441 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:08:34.197280Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 1894080579691 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:08:34.197311Z node 441 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000009, at schemeshard: 72057594046678944 2025-05-05T03:08:34.197328Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T03:08:34.197334Z 
node 441 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:08:34.197337Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:08:34.197341Z node 441 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:08:34.197343Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:08:34.197348Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:08:34.197354Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-05T03:08:34.197357Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T03:08:34.197361Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:08:34.197364Z node 441 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T03:08:34.197366Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T03:08:34.197372Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-05T03:08:34.197375Z node 441 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T03:08:34.197377Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-05-05T03:08:34.197380Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2025-05-05T03:08:34.197445Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:08:34.197456Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:08:34.197715Z node 441 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:08:34.197721Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:08:34.197753Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-05-05T03:08:34.197770Z node 441 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:08:34.197773Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [441:204:2206], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T03:08:34.197775Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [441:204:2206], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 6 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T03:08:34.197859Z node 441 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:08:34.197867Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:08:34.197870Z node 441 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:08:34.197872Z node 441 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-05T03:08:34.197874Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:08:34.197930Z node 441 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:08:34.197934Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:08:34.197937Z node 441 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:08:34.197939Z node 441 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-05-05T03:08:34.197941Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-05T03:08:34.197946Z node 441 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T03:08:34.197949Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [441:125:2151] 2025-05-05T03:08:34.198011Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:08:34.198015Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-05T03:08:34.198021Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:08:34.198329Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:08:34.198574Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:08:34.198590Z node 441 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T03:08:34.198597Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T03:08:34.198602Z node 441 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T03:08:34.198604Z node 441 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T03:08:34.198607Z node 441 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1004, itemIdx# 4294967295 2025-05-05T03:08:34.198643Z node 441 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:08:34.198868Z node 441 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-05T03:08:34.198910Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T03:08:34.198915Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T03:08:34.198958Z node 441 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-05T03:08:34.198968Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T03:08:34.198971Z node 441 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [441:980:2884] TestWaitNotification: OK eventTxId 1004 >> test.py::test[window-distinct_over_window_full_frames--Results] >> test.py::test[join-anyjoin_common_nodup--Results] [GOOD] >> test.py::test[join-commonjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-count_bans--Results] >> test.py::test[window-current/ansi_current_mixed--Results] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--ForceBlocks] >> test.py::test[blocks-decimal_op_decimal--Results] [GOOD] >> test.py::test[blocks-distinct_mixed_all--Results] >> test.py::test[union_all-infer_3-default.txt-Results] [GOOD] >> test.py::test[view-file_inner--Results] >> test.py::test[action-eval_unresolved_type_arg-default.txt-Results] [GOOD] >> test.py::test[action-eval_values_output_table_subquery--ForceBlocks] >> test.py::test[blocks-tuple_nth--ForceBlocks] [GOOD] >> test.py::test[blocks-tuple_nth--Results] >> test.py::test[join-lookupjoin_bug8533-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_bug8533-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi-off-ForceBlocks] >> test.py::test[join-mapjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_table_range-off-Results] [SKIPPED] >> test.py::test[join-nested_semi_join-off-Results] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-Results] |84.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[aggregate-group_by_hop--Results] [SKIPPED] >> test.py::test[join-nested_semi_join-off-Results] [SKIPPED] >> test.py::test[join-premap_common_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_map_cross--Results] >> test.py::test[blocks-decimal_multiplicative_ops--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_multiplicative_ops--Results] >> test.py::test[column_group-hint-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_unk_col_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_unk_col_fail--Results] [SKIPPED] >> 
test.py::test[column_order-union_all-default.txt-ForceBlocks] >> test.py::test[schema-insert_sorted-row_spec-ForceBlocks] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-Results] >> test.py::test[join-lookupjoin_with_cache-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_with_cache-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_dup_key--ForceBlocks] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_reuse--ForceBlocks] >> test.py::test[join-pullup_inner--Results] [GOOD] >> test.py::test[join-pullup_inner-off-Results] [SKIPPED] >> test.py::test[join-pullup_rownumber--Results] >> test.py::test[aggr_factory-bottom_by-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-Results] >> test.py::test[weak_field-weak_field_infer_scheme--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_infer_scheme--Results] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_native-on-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_native-on-Results] >> test.py::test[pg-tpcds-q61-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q61-default.txt-Results] >> test.py::test[key_filter-ranges--Results] [GOOD] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt-ForceBlocks] >> test.py::test[join-left_cast_to_string-off-ForceBlocks] [GOOD] >> test.py::test[join-left_cast_to_string-off-Results] [SKIPPED] >> test.py::test[join-left_null_literal--ForceBlocks] >> test.py::test[bigdate-table_yt_native-on-Results] [SKIPPED] >> test.py::test[pg-tpcds-q22-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q22-default.txt-Results] >> test.py::test[select-braces-default.txt-Results] [GOOD] >> test.py::test[select-cast_double_to_uint32-default.txt-Results] >> test.py::test[type_v3-float--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-opt-Results] >> test.py::test[select-opt_list_access-default.txt-Results] [GOOD] >> test.py::test[select-sample_limit_recordindex--ForceBlocks] >> test.py::test[action-action_nested_query-default.txt-Results] [GOOD] >> test.py::test[action-dep_world_action_quote-default.txt-Results] >> test.py::test[order_by-order_with_null-default.txt-Results] [GOOD] >> test.py::test[order_by-union_all--Results] >> test.py::test[pg-tpcds-q84-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q16-default.txt-Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull_fail--Results] [SKIPPED] >> test.py::test[pg-tpcds-q88-default.txt-Results] >> test.py::test[produce-process_multi_in--Results] >> test.py::test[binding-table_from_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_min--Results] >> test.py::test[produce-process_multi_in--Results] [SKIPPED] >> test.py::test[order_by-assume_over_input--ForceBlocks] [GOOD] >> test.py::test[order_by-assume_over_input--Results] >> test.py::test[schema-insert_sorted-row_spec-Results] [GOOD] >> test.py::test[schema-limit_simple--ForceBlocks] >> test.py::test[blocks-tuple_nth--Results] [GOOD] >> test.py::test[column_group-hint_anon_groups-disable-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-disable-Results] [SKIPPED] >> test.py::test[column_order-union_all_positional_unordered_fail--ForceBlocks] |84.9%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part0/test-results/pytest/{meta.json ... 
results_accumulator.log} |84.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[bigdate-table_yt_native-on-Results] [SKIPPED] >> test.py::test[aggregate-percentile_and_variance--Results] [GOOD] >> test.py::test[weak_field-weak_field_infer_scheme--Results] [GOOD] >> test.py::test[window-current/session_extended--ForceBlocks] >> test.py::test[blocks-decimal_multiplicative_ops--Results] [GOOD] >> test.py::test[blocks-interval_div_scalar--ForceBlocks] >> test.py::test[pg-tpcds-q22-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q48-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q61-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q09-default.txt-ForceBlocks] >> test.py::test[pg-doubles_search_path-default.txt-Results] [GOOD] >> test.py::test[pg-join_using_tables1-default.txt-Results] >> test.py::test[window-yql-14738-default.txt-Results] [GOOD] >> test.py::test[ypath-limit_with_range-default.txt-Results] >> test.py::test[distinct-distinct_and_join--Results] [GOOD] >> test.py::test[distinct-distinct_count_no_gouping-default.txt-Results] >> test.py::test[order_by-assume_over_input--Results] [GOOD] >> test.py::test[order_by-extract_members_over_sort_desc--ForceBlocks] >> test.py::test[select-opt_list_access-default.txt-Results] [GOOD] >> test.py::test[select-refselect--Results] [SKIPPED] >> test.py::test[select-result_size_limit--Results] [SKIPPED] >> test.py::test[select-scalar_subquery_with_star-default.txt-Results] >> test.py::test[action-action_eval_cluster_use--ForceBlocks] |84.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[produce-process_multi_in--Results] [SKIPPED] |84.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[aggregate-percentile_and_variance--Results] [GOOD] >> test.py::test[view-file_inner--Results] [GOOD] >> test.py::test[weak_field-yql-7888_mapfieldsubset--Results] >> test.py::test[action-eval_values_output_table_subquery--ForceBlocks] [GOOD] >> test.py::test[action-eval_values_output_table_subquery--Results] >> test.py::test[join-mergejoin_force_no_sorted-off-ForceBlocks] >> test.py::test[distinct-distinct_star-default.txt-Results] >> test.py::test[join-mergejoin_small_primary--Results] [GOOD] >> test.py::test[join-mergejoin_sorts_output_for_sort_inner--Results] [SKIPPED] >> test.py::test[column_order-union_all_positional_unordered_fail--ForceBlocks] [GOOD] >> test.py::test[column_order-union_all_positional_unordered_fail--Results] [GOOD] >> test.py::test[file-where_key_in_file_content_typed--ForceBlocks] >> test.py::test[join-mapjoin_dup_key--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_dup_key--Results] >> test.py::test[join-lookupjoin_semi-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_empty--ForceBlocks] >> test.py::test[column_order-union_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-union_all-default.txt-Results] |84.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[join-mergejoin_sorts_output_for_sort_inner--Results] [SKIPPED] >> test.py::test[action-eval_values_output_table_subquery--Results] [GOOD] >> test.py::test[action-insert_after_eval--ForceBlocks] >> test.py::test[type_v3-ignore_v3_hint-opt-Results] [GOOD] >> test.py::test[type_v3-ignore_v3_pragma--Results] >> 
test.py::test[join-left_null_literal--ForceBlocks] [GOOD] >> test.py::test[join-left_null_literal--Results] >> test.py::test[tpch-q21-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q21-default.txt-Results] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt-ForceBlocks] [GOOD] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt-Results] >> test.py::test[join-premap_map_cross--Results] [GOOD] >> test.py::test[join-premap_map_cross-off-Results] [SKIPPED] >> test.py::test[join-premap_map_inner--Results] >> test.py::test[select-cast_double_to_uint32-default.txt-Results] [GOOD] >> test.py::test[select-literal_negative-default.txt-Results] >> test.py::test[window-win_func_aggr_with_qualified_all--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Results] >> test.py::test[window-win_extract_members-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_extract_members-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names_norename-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename-off-Results] [SKIPPED] >> test.py::test[join-pullup_exclusion-off-ForceBlocks] >> test.py::test[pg-tpcds-q88-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q05-default.txt-Results] >> test.py::test[blocks-distinct_mixed_all--Results] [GOOD] >> test.py::test[blocks-filter_expr--Results] >> test.py::test[join-bush_in_in_in--Results] [GOOD] >> test.py::test[join-bush_in_in_in-off-Results] [SKIPPED] >> test.py::test[select-sample_limit_recordindex--ForceBlocks] [GOOD] >> test.py::test[select-sample_limit_recordindex--Results] >> test.py::test[column_order-union_all-default.txt-Results] [GOOD] >> test.py::test[expr-langver--ForceBlocks] >> test.py::test[schema-limit_simple--ForceBlocks] [GOOD] >> test.py::test[schema-limit_simple--Results] >> test.py::test[aggregate-group_by_rollup_column_reuse--ForceBlocks] [GOOD] |84.9%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part9/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[pg-tpcds-q48-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_reuse--Results] >> test.py::test[join-mapjoin_dup_key--Results] [GOOD] >> test.py::test[pg-tpcds-q48-default.txt-Results] >> test.py::test[blocks-distinct_mixed_keys--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_all--Results] >> test.py::test[join-yql-8131-off-Results] [SKIPPED] >> test.py::test[key_filter-empty_range_over_dynamic--Results] >> test.py::test[join-left_null_literal--Results] [GOOD] >> test.py::test[join-left_null_literal-off-ForceBlocks] >> test.py::test[join-pullup_rownumber--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_left-off-Results] [SKIPPED] >> test.py::test[schema-select_all_inferschema-extra_field-ForceBlocks] >> test.py::test[order_by-union_all--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename--Results] |84.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[join-bush_in_in_in-off-Results] [SKIPPED] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt-Results] [GOOD] >> test.py::test[like-like_clause_escape-default.txt-ForceBlocks] >> test.py::test[aggr_factory-corellation-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-count_if-default.txt-Results] >> test.py::test[ypath-limit_with_range-default.txt-Results] [GOOD] >> test.py::test[ytflow-select_over_static--Results] [SKIPPED] >> test.py::test[select-scalar_subquery_with_star-default.txt-Results] [GOOD] >> test.py::test[select-select_all_from_concat-default.txt-Results] >> test.py::test[schema-limit_simple--Results] [GOOD] >> test.py::test[schema-select_all_inferschema--ForceBlocks] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-6008_limit_after_map--ForceBlocks] >> test.py::test[select-sample_limit_recordindex--Results] [GOOD] >> test.py::test[select-select_all_from_concat-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_min--Results] [GOOD] >> test.py::test[blocks-combine_all_sum_filter--Results] >> test.py::test[pg-tpcds-q48-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q15-default.txt-ForceBlocks] >> test.py::test[pg-join_using_tables1-default.txt-Results] [GOOD] >> test.py::test[pg-select_limit-default.txt-Results] |84.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[join-mapjoin_dup_key--Results] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Results] [GOOD] >> test.py::test[window-win_func_first_last_with_part--ForceBlocks] >> test.py::test[tpch-q21-default.txt-Results] [GOOD] >> test.py::test[type_v3-append_struct-default.txt-ForceBlocks] |84.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[order_by-union_all--Results] [GOOD] >> test.py::test[window-win_extract_members-default.txt-Results] [GOOD] >> test.py::test[window-yql-15636-default.txt-ForceBlocks] >> test.py::test[blocks-interval_div_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_div_scalar--Results] >> test.py::test[action-action_eval_cluster_use--ForceBlocks] [GOOD] >> test.py::test[action-action_eval_cluster_use--Results] >> test.py::test[join-mergejoin_force_no_sorted-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_no_sorted-off-Results] [SKIPPED] >> 
test.py::test[join-mergejoin_force_per_link--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_per_link--Results] [SKIPPED] >> test.py::test[join-mergejoin_left_null_column--ForceBlocks] |84.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[ytflow-select_over_static--Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_empty--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_empty--Results] >> test.py::test[file-where_key_in_file_content_typed--ForceBlocks] [GOOD] >> test.py::test[order_by-extract_members_over_sort_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-extract_members_over_sort_desc--Results] >> test.py::test[file-where_key_in_file_content_typed--Results] >> test.py::test[distinct-distinct_count_no_gouping-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_count_only-default.txt-Results] >> test.py::test[action-dep_world_action_quote-default.txt-Results] [GOOD] >> test.py::test[action-eval_if_guard-default.txt-Results] >> test.py::test[join-bush_in--ForceBlocks] >> test.py::test[type_v3-ignore_v3_pragma--Results] [GOOD] >> test.py::test[udf-python_script--Results] >> test.py::test[pg-tpch-q09-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q09-default.txt-Results] >> test.py::test[weak_field-yql-7888_mapfieldsubset--Results] [GOOD] >> test.py::test[window-full/syscolumns--Results] >> test.py::test[aggregate-group_by_rollup_column_reuse--Results] [GOOD] >> test.py::test[aggregate-group_by_session_distinct--ForceBlocks] >> test.py::test[join-count_bans--Results] [GOOD] >> test.py::test[join-equi_join_three_simple-off-Results] [SKIPPED] >> test.py::test[join-full_trivial_udf_call--Results] >> test.py::test[select-literal_negative-default.txt-Results] [GOOD] >> test.py::test[select-sampleselect--Results] >> test.py::test[window-current/session_extended--ForceBlocks] [GOOD] >> test.py::test[window-current/session_extended--Results] >> test.py::test[action-action_eval_cluster_use--Results] [GOOD] >> test.py::test[action-eval_drop--ForceBlocks] |84.9%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part0/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[join-lookupjoin_semi_empty--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross-off-ForceBlocks] >> test.py::test[expr-langver--ForceBlocks] [GOOD] >> test.py::test[expr-langver--Results] >> test.py::test[join-pushdown_filter_over_left--ForceBlocks] >> test.py::test[join-premap_map_inner--Results] [GOOD] >> test.py::test[join-premap_merge_extrasort2-off-Results] [SKIPPED] >> test.py::test[join-pullup_renaming-off-Results] [SKIPPED] >> test.py::test[join-pullup_rownumber-off-Results] [SKIPPED] >> test.py::test[join-simple_columns_partial-off-Results] [SKIPPED] >> test.py::test[join-star_join_inners-off-Results] [SKIPPED] >> test.py::test[join-star_join_inners_vk_sorted--Results] >> test.py::test[join-selfjoin_on_sorted_with_rename--Results] [GOOD] >> test.py::test[join-star_join_semionly_premap-off-Results] [SKIPPED] >> test.py::test[join-three_equalities_paren--Results] >> test.py::test[blocks-filter_expr--Results] [GOOD] >> test.py::test[blocks-interval_add_interval--Results] >> test.py::test[file-where_key_in_file_content_typed--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_opt_dict--ForceBlocks] >> test.py::test[window-distinct_over_window_full_frames--Results] [GOOD] >> test.py::test[window-empty/aggregations--ForceBlocks] >> test.py::test[key_filter-empty_range_over_dynamic--Results] [GOOD] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] >> test.py::test[blocks-interval_div_scalar--Results] [GOOD] >> test.py::test[blocks-minmax_tuple--ForceBlocks] >> test.py::test[action-insert_after_eval--ForceBlocks] [GOOD] >> test.py::test[action-insert_after_eval--Results] >> test.py::test[join-left_null_literal-off-ForceBlocks] [GOOD] >> test.py::test[join-left_null_literal-off-Results] [SKIPPED] >> test.py::test[join-left_only_semi_and_other--ForceBlocks] >> test.py::test[aggregate-group_by_tablerow_column--ForceBlocks] >> test.py::test[select-select_all_from_concat-default.txt-Results] [GOOD] >> test.py::test[select-sum_to_string-default.txt-Results] >> test.py::test[order_by-extract_members_over_sort_desc--Results] [GOOD] >> test.py::test[order_by-literal_desc--ForceBlocks] >> test.py::test[schema-select_all_inferschema-extra_field-ForceBlocks] [GOOD] >> test.py::test[expr-langver--Results] [GOOD] >> test.py::test[file-file_constness--ForceBlocks] >> test.py::test[schema-select_all_inferschema-extra_field-Results] >> test.py::test[schema-select_all_inferschema--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema--Results] >> test.py::test[like-like_clause_escape-default.txt-ForceBlocks] [GOOD] >> test.py::test[like-like_clause_escape-default.txt-Results] >> test.py::test[pg-tpcds-q47-default.txt-Results] >> test.py::test[pg-select_limit-default.txt-Results] [GOOD] >> test.py::test[pg-select_table2-default.txt-Results] >> test.py::test[action-eval_if_guard-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-avg-default.txt-Results] >> test.py::test[select-select_all_from_concat-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_all_from_concat-default.txt-Results] >> test.py::test[join-pullup_exclusion-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_exclusion-off-Results] [SKIPPED] >> test.py::test[blocks-combine_all_sum_filter--Results] [GOOD] >> test.py::test[blocks-combine_hashed_pg--Results] >> test.py::test[schema-select_all_inferschema-extra_field-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_range--ForceBlocks] >> 
test.py::test[action-insert_after_eval--Results] [GOOD] >> test.py::test[action-mixed_eval_typeof_world1--ForceBlocks] >> test.py::test[distinct-distinct_count_only-default.txt-Results] [GOOD] >> test.py::test[udf-python_script--Results] [GOOD] >> test.py::test[distinct-distinct_groupby-default.txt-Results] >> test.py::test[udf-udaf--Results] >> test.py::test[schema-select_all_inferschema--Results] [GOOD] >> test.py::test[pg-tpch-q05-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q19-default.txt-Results] >> test.py::test[optimizers-yql-6008_limit_after_map--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-6008_limit_after_map--Results] |84.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[join-pullup_exclusion-off-Results] [SKIPPED] >> test.py::test[window-current/session_extended--Results] [GOOD] >> test.py::test[window-full/aggregations--ForceBlocks] >> test.py::test[select-sampleselect--Results] [GOOD] >> test.py::test[select-trivial_having-default.txt-Results] >> test.py::test[pg-tpch-q09-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_all-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_all_list_stream--ForceBlocks] >> test.py::test[select-select_all_from_concat-default.txt-Results] [GOOD] >> test.py::test[select-struct_access_without_table_name--ForceBlocks] >> test.py::test[like-like_clause_escape-default.txt-Results] [GOOD] >> test.py::test[limit-dynamic_sort_limit--ForceBlocks] [SKIPPED] >> test.py::test[limit-dynamic_sort_limit--Results] [SKIPPED] >> test.py::test[limit-empty_input_after_limit-default.txt-ForceBlocks] >> test.py::test[produce-reduce_all_list_stream--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_all_list_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_subfields--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_subfields--Results] [SKIPPED] |84.9%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part9/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[join-mergejoin_left_null_column--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_left_null_column--Results] >> test.py::test[pg-tpch-q15-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q15-default.txt-Results] >> test.py::test[action-eval_drop--ForceBlocks] [GOOD] >> test.py::test[action-eval_drop--Results] >> test.py::test[select-where_in-default.txt-Results] |85.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[schema-select_all_inferschema--Results] [GOOD] >> test.py::test[window-win_func_first_last_with_part--ForceBlocks] [GOOD] >> test.py::test[window-win_func_first_last_with_part--Results] >> test.py::test[aggr_factory-count_if-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-Results] >> test.py::test[type_v3-append_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[type_v3-append_struct-default.txt-Results] >> test.py::test[join-bush_in--ForceBlocks] [GOOD] >> test.py::test[join-bush_in--Results] >> test.py::test[join-pushdown_filter_over_left--ForceBlocks] [GOOD] >> test.py::test[join-pushdown_filter_over_left--Results] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] [GOOD] >> test.py::test[key_filter-string_with_legacy--Results] >> test.py::test[select-sum_to_string-default.txt-Results] [GOOD] >> test.py::test[select-table_content_with_tmp_folder--Results] >> test.py::test[action-mixed_eval_typeof_world1--ForceBlocks] [GOOD] >> test.py::test[action-mixed_eval_typeof_world1--Results] [GOOD] >> test.py::test[action-nested_rewrite_io-default.txt-ForceBlocks] |85.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[produce-reduce_subfields--Results] [SKIPPED] >> test.py::test[order_by-literal_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_desc--Results] >> test.py::test[aggregate-list_with_fold_map--Results] >> test.py::test[optimizers-yql-6008_limit_after_map--Results] [GOOD] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--ForceBlocks] >> test.py::test[blocks-interval_add_interval--Results] [GOOD] >> test.py::test[window-yql-15636-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-yql-15636-default.txt-Results] >> test.py::test[aggregate-group_by_tablerow_column--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_tablerow_column--Results] >> test.py::test[column_group-hint_append_fail-diff_grp-Results] [SKIPPED] >> test.py::test[column_order-select_action-default.txt-Results] >> test.py::test[action-eval_drop--Results] [GOOD] >> test.py::test[action-pending_arg_fail--ForceBlocks] >> test.py::test[blocks-lazy_nonstrict_basic--Results] >> test.py::test[join-mergejoin_saves_output_sort_cross-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross-off-Results] [SKIPPED] >> test.py::test[join-premap_common_multiparents_no_premap--ForceBlocks] >> test.py::test[distinct-distinct_star-default.txt-Results] [GOOD] >> test.py::test[dq-blacklisted_pragmas1--Results] [SKIPPED] >> test.py::test[dq-precompute_parallel--Results] [SKIPPED] >> test.py::test[dq-truncate_local-default.txt-Results] [SKIPPED] >> test.py::test[expr-inline_call--Results] >> test.py::test[flatten_by-flatten_by_opt_dict--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_by_opt_dict--Results] >> test.py::test[join-full_trivial_udf_call--Results] [GOOD] >> test.py::test[join-grace_join2--Results] 
[SKIPPED] >> test.py::test[join-inner_all_right-off-Results] [SKIPPED] >> test.py::test[join-inner_on_key_only-off-Results] [SKIPPED] >> test.py::test[join-inner_trivial--Results] >> test.py::test[schema-insert_sorted-read_schema-Results] >> test.py::test[join-left_only_semi_and_other--ForceBlocks] [GOOD] >> test.py::test[join-left_only_semi_and_other--Results] >> test.py::test[aggregate-group_by_session_distinct--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_distinct--Results] >> test.py::test[file-file_constness--ForceBlocks] [GOOD] >> test.py::test[file-file_constness--Results] >> test.py::test[join-mergejoin_left_null_column--Results] [GOOD] >> test.py::test[join-mergejoin_left_null_column-off-ForceBlocks] >> test.py::test[type_v3-append_struct-default.txt-Results] [GOOD] >> test.py::test[type_v3-singulars--ForceBlocks] [SKIPPED] >> test.py::test[type_v3-singulars--Results] [SKIPPED] >> test.py::test[udf-named_args_for_script--ForceBlocks] >> test.py::test[join-three_equalities_paren--Results] [GOOD] >> test.py::test[join-trivial_view-off-Results] >> test.py::test[pg-select_table2-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q04-default.txt-Results] >> test.py::test[order_by-literal_desc--Results] [GOOD] >> test.py::test[order_by-native_desc_assume_with_transform--ForceBlocks] [SKIPPED] >> test.py::test[order_by-native_desc_assume_with_transform--Results] [SKIPPED] >> test.py::test[order_by-native_desc_sort_calc--ForceBlocks] [SKIPPED] >> test.py::test[order_by-native_desc_sort_calc--Results] [SKIPPED] >> test.py::test[order_by-order_by_dynum_desc-default.txt-ForceBlocks] >> test.py::test[join-trivial_view-off-Results] [SKIPPED] >> test.py::test[join-yql-10654_pullup_with_sys_columns--Results] >> test.py::test[window-win_func_first_last_with_part--Results] [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--ForceBlocks] >> test.py::test[pg-tpch-q15-default.txt-Results] [GOOD] >> test.py::test[produce-process_multi_in--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_multi_in--Results] >> test.py::test[pg-tpcds-q47-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q83-default.txt-ForceBlocks] >> test.py::test[window-empty/aggregations--ForceBlocks] [GOOD] >> test.py::test[window-empty/aggregations--Results] >> test.py::test[produce-process_multi_in--Results] [SKIPPED] >> test.py::test[produce-reduce_lambda--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_lambda--Results] [SKIPPED] >> test.py::test[produce-reduce_lambda_presort_twin--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_lambda_presort_twin--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in--ForceBlocks] >> test.py::test[blocks-minmax_tuple--ForceBlocks] [GOOD] >> test.py::test[blocks-minmax_tuple--Results] >> test.py::test[action-pending_arg_fail--ForceBlocks] [GOOD] >> test.py::test[action-pending_arg_fail--Results] [GOOD] >> test.py::test[action-subquery_opt_args-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_tablerow_column--Results] [GOOD] >> test.py::test[aggregate-group_by_tz_date--ForceBlocks] >> test.py::test[join-pushdown_filter_over_left--Results] [GOOD] >> test.py::test[join-star_join_mirror--ForceBlocks] >> test.py::test[schema-select_all_inferschema_range--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema_range--Results] >> test.py::test[join-bush_in--Results] [GOOD] >> test.py::test[join-bush_in-off-ForceBlocks] >> test.py::test[flatten_by-flatten_by_opt_dict--Results] [GOOD] >> 
test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt-ForceBlocks] >> test.py::test[blocks-distinct_opt_state_all--Results] [GOOD] >> test.py::test[blocks-interval_add_interval_scalar--Results] >> test.py::test[select-where_in-default.txt-Results] [GOOD] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-Results] >> test.py::test[pg-tpch-q19-default.txt-Results] [GOOD] >> test.py::test[produce-process_multi_out_bad_count_fail--Results] >> test.py::test[join-star_join_inners_vk_sorted--Results] [GOOD] >> test.py::test[join-star_join_with_diff_complex_key--Results] [SKIPPED] >> test.py::test[join-yql-10654_pullup_with_sys_columns-off-Results] [SKIPPED] >> test.py::test[join-yql-14829_left-off-Results] [SKIPPED] >> test.py::test[select-struct_access_without_table_name--ForceBlocks] [GOOD] >> test.py::test[select-struct_access_without_table_name--Results] >> test.py::test[file-file_constness--Results] [GOOD] >> test.py::test[hor_join-fuse_multi_usage--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-fuse_multi_usage--Results] [SKIPPED] >> test.py::test[in-basic_in-default.txt-ForceBlocks] >> test.py::test[limit-empty_input_after_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-empty_input_after_limit-default.txt-Results] >> test.py::test[expr-inline_call--Results] [GOOD] >> test.py::test[file-where_key_in_file_content_typed--Results] >> test.py::test[udf-udaf--Results] [GOOD] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-Results] >> test.py::test[select-trivial_having-default.txt-Results] [GOOD] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt-Results] |85.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[join-yql-14829_left-off-Results] [SKIPPED] >> test.py::test[window-yql-15636-default.txt-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_range--Results] [GOOD] >> test.py::test[schema-select_field-read_schema-ForceBlocks] >> test.py::test[select-table_content_with_tmp_folder--Results] [GOOD] >> test.py::test[schema-insert_sorted-read_schema-Results] [GOOD] >> test.py::test[schema-select_field-read_schema-Results] >> test.py::test[action-nested_rewrite_io-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-nested_rewrite_io-default.txt-Results] >> test.py::test[produce-process_multi_out_bad_count_fail--Results] [GOOD] >> test.py::test[produce-process_sorted_multi_out--Results] [SKIPPED] >> test.py::test[produce-process_trivial_as_struct-default.txt-Results] >> test.py::test[select-struct_access_without_table_name--Results] [GOOD] >> test.py::test[select-table_content_from_sort_desc-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_session_distinct--Results] [GOOD] >> test.py::test[window-empty/aggregations--Results] [GOOD] >> test.py::test[window-full/aggregations_leadlag--ForceBlocks] >> test.py::test[key_filter-string_with_legacy--Results] [GOOD] >> test.py::test[like-ilike_clause-default.txt-Results] >> test.py::test[aggregate-percentile_and_variance--ForceBlocks] >> test.py::test[column_order-select_action-default.txt-Results] [GOOD] >> test.py::test[count-count_all-default.txt-Results] |85.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[window-yql-15636-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_groupby-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_join-default.txt-Results] >> 
test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--Results] >> test.py::test[aggr_factory-avg-default.txt-Results] [GOOD] >> test.py::test[aggregate-GroupByOneField--Results] >> test.py::test[pg-tpcds-q04-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q10-default.txt-Results] >> test.py::test[order_by-order_by_dynum_desc-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-empty_input_after_limit-default.txt-Results] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap--Results] >> test.py::test[window-full/syscolumns--Results] [GOOD] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-Results] >> test.py::test[join-inner_trivial--Results] [GOOD] >> test.py::test[join-left_null_literal-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o2o--Results] >> test.py::test[window-full/aggregations--ForceBlocks] [GOOD] >> test.py::test[window-full/aggregations--Results] >> test.py::test[order_by-order_by_dynum_desc-default.txt-Results] >> test.py::test[pg-tpcds-q83-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-list_with_fold_map--Results] [GOOD] >> test.py::test[bigdate-table_int_cast-default.txt-Results] >> test.py::test[multicluster-basic-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[multicluster-basic-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_group_one_of_multi--ForceBlocks] >> test.py::test[hor_join-sorted_out_mix--Results] [SKIPPED] >> test.py::test[hor_join-yql19332_aux_cols--Results] >> test.py::test[aggr_factory-linear_histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-top_by-default.txt-Results] >> test.py::test[udf-named_args_for_script--ForceBlocks] [GOOD] >> test.py::test[udf-named_args_for_script--Results] |85.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[select-table_content_with_tmp_folder--Results] [GOOD] >> test.py::test[pg-tpcds-q83-default.txt-Results] >> test.py::test[join-left_only_semi_and_other--Results] [GOOD] >> test.py::test[join-left_only_with_other-off-ForceBlocks] >> test.py::test[blocks-lazy_nonstrict_basic--Results] [GOOD] >> test.py::test[blocks-pg_call--Results] >> test.py::test[join-mergejoin_left_null_column-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_left_null_column-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nested--ForceBlocks] >> test.py::test[produce-process_rows_sorted_desc_multi_out--Results] [SKIPPED] >> test.py::test[produce-process_rows_sorted_multi_out--Results] [SKIPPED] >> test.py::test[produce-process_sorted_desc_multi_out--Results] [SKIPPED] >> test.py::test[produce-process_streaming_inline_bash-default.txt-Results] >> test.py::test[action-subquery_opt_args-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-subquery_opt_args-default.txt-Results] >> test.py::test[blocks-minmax_tuple--Results] [GOOD] >> test.py::test[blocks-pg--ForceBlocks] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_optional_field [GOOD] >> test.py::test[action-nested_rewrite_io-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bitxor-default.txt-ForceBlocks] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-Results] [GOOD] >> 
test.py::test[tpch-q16-default.txt-Results] >> test.py::test[aggregate-group_by_tz_date--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_tz_date--Results] >> test.py::test[bigdate-tz_table_pull--Results] >> test.py::test[blocks-interval_add_interval_scalar--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_nested--Results] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-Results] [GOOD] >> test.py::test[view-file_inner_udf--Results] [SKIPPED] >> test.py::test[weak_field-few_source_different_columns--Results] >> test.py::test[produce-reduce_multi_in--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in--Results] >> test.py::test[order_by-order_by_dynum_desc-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_expr_mul_cols--ForceBlocks] >> test.py::test[pg-tpcds-q83-default.txt-Results] [GOOD] >> test.py::test[pragma-config_exec--ForceBlocks] >> test.py::test[udf-named_args_for_script--Results] [GOOD] >> test.py::test[union-union_trivial-default.txt-ForceBlocks] >> test.py::test[join-yql-10654_pullup_with_sys_columns--Results] [GOOD] >> test.py::test[join-yql-19081--Results] [SKIPPED] >> test.py::test[join-yql-8125--Results] >> test.py::test[action-subquery_opt_args-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-ForceBlocks] >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt-Results] >> test.py::test[file-where_key_in_file_content_typed--Results] [GOOD] >> test.py::test[flatten_by-flatten_dict--Results] >> test.py::test[in-basic_in-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-basic_in-default.txt-Results] >> test.py::test[schema-select_field-read_schema-ForceBlocks] [GOOD] >> test.py::test[schema-select_field-read_schema-Results] >> test.py::test[join-bush_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_in-off-Results] [SKIPPED] >> test.py::test[join-equi_join_three_asterisk_eval-off-ForceBlocks] >> test.py::test[window-win_func_rank_by_opt_part--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--Results] [GOOD] >> test.py::test[order_by-literal_complex--ForceBlocks] >> test.py::test[join-premap_common_multiparents_no_premap--Results] [GOOD] >> test.py::test[join-pullup_rownumber--ForceBlocks] >> test.py::test[window-win_func_rank_by_opt_part--Results] >> test.py::test[join-star_join_mirror--ForceBlocks] [GOOD] >> test.py::test[join-star_join_mirror--Results] >> test.py::test[schema-select_field-read_schema-Results] [GOOD] >> test.py::test[schema-select_simple-default.txt-Results] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_disable-default.txt-Results] |85.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::test[count-count_all-default.txt-Results] [GOOD] >> test.py::test[count-count_const_no_grouping-default.txt-Results] >> test.py::test[produce-process_trivial_as_struct-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_lambda_outstream-default.txt-Results] >> test.py::test[aggregate-group_by_tz_date--Results] [GOOD] >> test.py::test[aggregate-group_by_with_where-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q10-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q27-default.txt-Results] |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt-Results] [GOOD] >> test.py::test[schema-select_field-read_schema-Results] [GOOD] >> test.py::test[in-in_compact_distinct--ForceBlocks] >> test.py::test[schema-user_schema_append--ForceBlocks] >> test.py::test[produce-fuse_reduces_with_presort--Results] [GOOD] >> test.py::test[produce-process_multi_in_single_out--Results] [SKIPPED] >> test.py::test[like-ilike_clause-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_python-default.txt-Results] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-Results] >> test.py::test[in-basic_in-default.txt-Results] [GOOD] >> test.py::test[in-yql-10038-default.txt-ForceBlocks] >> test.py::test[produce-reduce_multi_in--Results] [GOOD] >> test.py::test[produce-reduce_multi_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_multi_out--Results] [SKIPPED] >> test.py::test[blocks-pg_call--Results] [GOOD] >> test.py::test[optimizers-unused_columns_group_one_of_multi--ForceBlocks] [GOOD] >> test.py::test[aggregate-percentile_and_variance--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nested--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nested--Results] >> test.py::test[blocks-combine_hashed_pg--Results] [GOOD] >> test.py::test[bigdate-tz_table_pull--Results] [GOOD] >> test.py::test[aggregate-percentile_and_variance--Results] >> test.py::test[optimizers-unused_columns_group_one_of_multi--Results] >> test.py::test[blocks-combine_hashed_sum--Results] >> test.py::test[binding-insert_binding--Results] >> test.py::test[aggregate-GroupByOneField--Results] [GOOD] >> test.py::test[blocks-pg--ForceBlocks] [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--Results] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt-Results] >> test.py::test[blocks-pg--Results] >> test.py::test[produce-process_streaming_inline_bash-default.txt-Results] [GOOD] >> test.py::test[join-left_only_with_other-off-ForceBlocks] [GOOD] >> test.py::test[blocks-lazy_nonstrict_nested--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o--Results] [GOOD] >> test.py::test[pg-tpcds-q50-default.txt-Results] >> test.py::test[window-full/aggregations--Results] [GOOD] >> test.py::test[select-table_content_from_sort_desc-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-top_by-default.txt-Results] [GOOD] >> test.py::test[schema-user_schema_append--ForceBlocks] [GOOD] >> test.py::test[weak_field-few_source_different_columns--Results] [GOOD] >> test.py::test[select-table_content_from_sort_desc-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_disable-default.txt-Results] [GOOD] >> test.py::test[count-count_const_no_grouping-default.txt-Results] [GOOD] >> test.py::test[count-count_nullable--Results] >> 
test.py::test[order_by-order_by_expr_mul_cols--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_expr_mul_cols--Results] >> test.py::test[window-full/aggregations_leadlag--ForceBlocks] [GOOD] >> test.py::test[hor_join-yql19332_aux_cols--Results] [GOOD] >> test.py::test[join-star_join_inners_premap-off-Results] [SKIPPED] >> test.py::test[pragma-config_exec--ForceBlocks] [GOOD] >> test.py::test[blocks-pg--Results] [GOOD] >> test.py::test[join-star_join_mirror--Results] [GOOD] >> test.py::test[join-star_join_semionly_premap--ForceBlocks] >> test.py::test[schema-select_simple-default.txt-Results] [GOOD] >> test.py::test[union-union_trivial-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q27-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_join-default.txt-Results] [GOOD] >> test.py::test[optimizers-unused_columns_group_one_of_multi--Results] [GOOD] >> test.py::test[order_by-literal_complex--ForceBlocks] [GOOD] >> test.py::test[aggr_factory-bitxor-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-17413-topsort--ForceBlocks] >> test_row_dispatcher.py::TestPqRowDispatcher::test_group_by_hop_restart_node >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-pullup_rownumber--ForceBlocks] [GOOD] >> test.py::test[produce-process_with_lambda_outstream-default.txt-Results] [GOOD] >> test.py::test[aggregate-percentile_and_variance--Results] [GOOD] >> test.py::test[aggregate-rollup_with_dict--ForceBlocks] >> test.py::test[aggregate-group_by_with_where-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_with_where-default.txt-Results] >> test.py::test[produce-process_with_python-default.txt-Results] [GOOD] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nested--Results] [GOOD] >> test.py::test[produce-reduce_subfields--Results] [SKIPPED] >> test.py::test[blocks-partial_blocks1--Results] >> test.py::test[join-equi_join_three_asterisk_eval-off-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_subfields-sorted-Results] [SKIPPED] >> test.py::test[schema-user_schema_append--Results] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-Results] >> test.py::test[binding-insert_binding--Results] [GOOD] >> test.py::test[window-full/session--ForceBlocks] >> test.py::test[produce-reduce_with_python_few_keys--Results] [SKIPPED] >> test.py::test[weak_field-weak_field_infer_scheme--Results] >> test.py::test[select-from_in_front-default.txt-ForceBlocks] >> test.py::test[select-table_content_from_sort_desc-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-Results] >> test.py::test[ql_filter-integer_eval--Results] >> test.py::test[window-full/aggregations_leadlag--Results] |85.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[produce-reduce_multi_out--Results] [SKIPPED] |85.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[window-win_func_rank_by_opt_part--Results] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-Results] >> test.py::test[join-starjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-yql-8980-off-Results] >> test.py::test[order_by-literal_complex--Results] >> test.py::test[pg-tpcds-q41-default.txt-Results] >> 
test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-Results] >> test.py::test[join-pullup_rownumber--Results] >> test.py::test[produce-reduce_lambda--Results] [SKIPPED] >> test.py::test[produce-reduce_all_expr-default.txt-Results] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Results] >> test.py::test[window-win_func_first_last--Results] |85.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[blocks-pg_call--Results] [GOOD] |85.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[blocks-pg--Results] [GOOD] >> test.py::test[join-left_only_with_other-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_not_selected-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted-off-ForceBlocks] >> test.py::test[join-equi_join_three_asterisk_eval-off-Results] [SKIPPED] >> test.py::test[join-full_equal_not_null--ForceBlocks] >> test.py::test[binding-table_concat_binding-default.txt-Results] >> test.py::test[in-in_with_table_of_tuples-default.txt-Results] >> test.py::test[order_by-order_by_expr_mul_cols--Results] [GOOD] >> test.py::test[join-yql-8980-off-Results] [SKIPPED] >> test.py::test[union-union_trivial-default.txt-Results] >> test.py::test[aggr_factory-bitxor-default.txt-Results] >> test.py::test[pragma-config_exec--Results] >> test.py::test[in-in_compact_distinct--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_complex--Results] [GOOD] >> test.py::test[in-yql-10038-default.txt-ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_eval--Results] [GOOD] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q50-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_with_where-default.txt-Results] [GOOD] >> test.py::test[blocks-partial_blocks1--Results] [GOOD] >> test.py::test[select-from_in_front-default.txt-ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_infer_scheme--Results] [GOOD] >> test.py::test[join-yql-8125--Results] [GOOD] >> test.py::test[flatten_by-flatten_dict--Results] [GOOD] >> test.py::test[join-pullup_rownumber--Results] [GOOD] >> test.py::test[blocks-combine_hashed_sum--Results] [GOOD] >> test.py::test[tpch-q16-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-Results] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-Results] [GOOD] >> test.py::test[blocks-complex_scalars--Results] >> test.py::test[bigdate-table_int_cast-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-Results] [GOOD] |85.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[select-table_content_from_sort_desc-default.txt-Results] [GOOD] |85.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[join-left_only_with_other-off-Results] [SKIPPED] >> test.py::test[count-count_nullable--Results] [GOOD] >> test.py::test[join-star_join_semionly_premap--ForceBlocks] [GOOD] >> test.py::test[action-action_eval_cluster_use_compact_named_exprs--Results] [SKIPPED] >> test.py::test[binding-table_concat_binding-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_expr-default.txt-Results] [GOOD] >> test.py::test[join-yql_465-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o2o-off-Results] 
[SKIPPED] >> test.py::test[pg-tpcds-q07-default.txt-ForceBlocks] >> test.py::test[union-union_trivial-default.txt-Results] [GOOD] >> test.py::test[pragma-config_exec--Results] [GOOD] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Results] >> test.py::test[key_filter-contains_optional--Results] >> test.py::test[in-in_compact_distinct--Results] >> test.py::test[order_by-order_by_expr_over_sorted_table--ForceBlocks] >> test.py::test[pg-tpcds-q69-default.txt-Results] >> test.py::test[in-yql-10038-default.txt-Results] >> test.py::test[limit-yql-8046_empty_sorted_desc--Results] [SKIPPED] >> test.py::test[limit-zero_limit-default.txt-Results] >> test.py::test[select-from_in_front-default.txt-Results] >> test.py::test[tpch-q19-default.txt-Results] >> test.py::test[join-yql-8125-off-Results] [SKIPPED] >> test.py::test[key_filter-between_with_key_filter--Results] >> test.py::test[flatten_by-flatten_expr_join--Results] [SKIPPED] >> test.py::test[flatten_by-struct_without_correlation-default.txt-Results] >> test.py::test[weak_field-weak_field_long_fields--Results] >> test.py::test[join-selfjoin_on_sorted_with_rename--ForceBlocks] >> test.py::test[bigdate-table_yt_key_filter-default-Results] [SKIPPED] >> test.py::test[bigdate-tz_table_yt_key_filter--Results] [SKIPPED] >> test.py::test[blocks-add_int64--Results] >> test.py::test[schema-skip_complex_type--Results] >> test.py::test[pg-tpcds-q53-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_fail--Results] >> test.py::test[aggr_factory-bitxor-default.txt-Results] [GOOD] >> test.py::test[join-star_join_semionly_premap--Results] >> test.py::test[dq-read_cost_native-default.txt-Results] [SKIPPED] >> test.py::test[dq-wrong_script_segf--Results] [SKIPPED] >> test.py::test[epochs-reset_sortness_on_append--Results] >> test.py::test[action-nested_subquery--Results] >> test.py::test[union_all-mix_map_and_read-default.txt-ForceBlocks] >> test.py::test[in-yql-10038-default.txt-Results] [GOOD] >> test.py::test[insert-keepmeta_with_read_udf_fail--ForceBlocks] >> test.py::test[pragma-release_temp_data_chain_pull--ForceBlocks] [SKIPPED] >> test.py::test[in-in_compact_distinct--Results] [GOOD] >> test.py::test[key_filter-contains_optional--Results] [GOOD] >> test.py::test[ql_filter-integer_single_equals--Results] >> test.py::test[aggregate-library_error_in_aggregation_fail--ForceBlocks] >> test.py::test[aggr_factory-min_by-default.txt-ForceBlocks] >> test.py::test[select-from_in_front-default.txt-Results] [GOOD] >> test.py::test[blocks-add_int16--Results] >> test.py::test[join-lookupjoin_semi_subq-off-Results] [SKIPPED] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt-Results] >> test.py::test[aggregate-library_error_in_aggregation_fail--ForceBlocks] [GOOD] >> test.py::test[select-match_clause--ForceBlocks] >> test.py::test[insert-override-with_view-ForceBlocks] [SKIPPED] >> test.py::test[insert-override-with_view-Results] [SKIPPED] >> test.py::test[insert-replace_ordered_by_key_desc-default.txt-ForceBlocks] >> test.py::test[pragma-release_temp_data_chain_pull--Results] [SKIPPED] >> test.py::test[aggregate-agg_phases_table3-default.txt-Results] [GOOD] >> test.py::test[join-full_equal_not_null--ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q07-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Results] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-Results] [GOOD] >> 
test.py::test[join-mergejoin_with_different_key_names_nonsorted-off-ForceBlocks] [GOOD] >> test.py::test[aggregate-rollup_with_dict--ForceBlocks] [GOOD] >> test.py::test[window-win_func_first_last--Results] [GOOD] >> test.py::test[pg-tpcds-q69-default.txt-Results] [GOOD] >> test.py::test[join-full_equal_not_null--Results] >> test.py::test[ql_filter-integer_single_equals--Results] [GOOD] >> test.py::test[aggregate-library_error_in_aggregation_fail--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence--Results] >> test.py::test[produce-process_lambda_opt_args-default.txt-ForceBlocks] >> test.py::test[window-win_func_over_group_by_list_names--Results] >> test.py::test[pg-tpch-q09-default.txt-Results] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q07-default.txt-Results] >> test.py::test[bigdate-table_common_type-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted-off-Results] [SKIPPED] >> test.py::test[aggregate-rollup_with_dict--Results] >> test.py::test[sampling-reduce--Results] [SKIPPED] >> test.py::test[join-no_empty_join_for_dyn-off-ForceBlocks] >> test.py::test[sampling-system_sampling--Results] [SKIPPED] >> test.py::test[schema-append_to_desc--Results] >> test.py::test[join-mapjoin_early_rewrite_sequence--Results] [GOOD] >> test.py::test[optimizers-yql-17413-topsort--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-17413-topsort--Results] >> test.py::test[order_by-order_by_expr_over_sorted_table--ForceBlocks] [GOOD] >> test.py::test[insert-keepmeta_with_read_udf_fail--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_expr_over_sorted_table--Results] >> test.py::test[insert-keepmeta_with_read_udf_fail--Results] [GOOD] >> test.py::test[insert-trivial_literals-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_long_fields--Results] [GOOD] |85.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[distinct-distinct_join-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_fail--Results] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-Results] [GOOD] >> test.py::test[window-distinct_over_window--Results] |85.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[blocks-partial_blocks1--Results] [GOOD] >> test.py::test[pg-tpcds-q07-default.txt-Results] [GOOD] >> test.py::test[union_all-mix_map_and_read-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q53-default.txt-Results] [GOOD] >> test.py::test[window-full/aggregations_leadlag--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Results] [GOOD] >> test.py::test[produce-process_lambda_opt_args-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-zero_limit-default.txt-Results] [GOOD] >> test.py::test[join-full_equal_not_null--Results] [GOOD] >> test.py::test[action-nested_subquery--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename--ForceBlocks] [GOOD] >> test.py::test[schema-skip_complex_type--Results] [GOOD] >> test.py::test[schema-user_schema_mix1--Results] >> test.py::test[in-in_with_table_of_tuples-default.txt-Results] [GOOD] >> test.py::test[blocks-add_int64--Results] [GOOD] >> test.py::test[join-star_join_semionly_premap--Results] [GOOD] >> test.py::test[key_filter-between_with_key_filter--Results] [GOOD] >> test.py::test[join-star_join_semionly_premap-off-ForceBlocks] >> 
test.py::test[order_by-order_by_expr_over_sorted_table--Results] [GOOD] >> test.py::test[blocks-complex_scalars--Results] [GOOD] >> test.py::test[aggregate-rollup_with_dict--Results] [GOOD] >> test.py::test[blocks-add_uint64--ForceBlocks] >> test.py::test[window-full/session--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Results] [GOOD] >> test.py::test[select-match_clause--ForceBlocks] [GOOD] >> test.py::test[tpch-q19-default.txt-Results] [GOOD] >> test.py::test[flatten_by-struct_without_correlation-default.txt-Results] [GOOD] >> test.py::test[hor_join-fuse_multi_outs2--Results] [SKIPPED] >> test.py::test[blocks-add_int16--Results] [GOOD] >> test.py::test[insert-replace_ordered_by_key_desc-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_cube_join_count--Results] >> test.py::test[optimizers-yql-17413-topsort--Results] [GOOD] >> test.py::test[table_range-concat_sorted_max_tables--Results] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q18-default.txt-ForceBlocks] >> test.py::test[schema-append_to_desc--Results] [GOOD] >> test.py::test[epochs-reset_sortness_on_append--Results] [GOOD] >> test.py::test[insert-trivial_literals-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-generic/session--ForceBlocks] >> test.py::test[union_all-mix_map_and_read-default.txt-Results] >> test.py::test[pg-tpcds-q54-default.txt-Results] >> test.py::test[insert-trivial_literals-default.txt-Results] >> test.py::test[produce-reduce_multi_out--Results] [SKIPPED] >> test.py::test[result_types-containers-default.txt-Results] >> test.py::test[produce-process_lambda_opt_args-default.txt-Results] >> test.py::test[lineage-select_all-default.txt-Results] [SKIPPED] >> test.py::test[join-full_join--ForceBlocks] >> test.py::test[lineage-select_field_limit_offset-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_join-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_nested_table_row-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_one-default.txt-Results] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_rename--Results] >> test.py::test[aggr_factory-avg_if-default.txt-Results] >> test.py::test[optimizers-sort_constraint_in_left--Results] >> test.py::test[insert-replace_ordered_by_key_desc-default.txt-Results] >> test.py::test[blocks-add_uint64--Results] >> test.py::test[order_by-order_by_expr_simple--ForceBlocks] >> test.py::test[blocks-date_sub--Results] >> test.py::test[window-full/session--Results] >> test.py::test[produce-reduce_lambda_list_table--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_difftype--Results] >> test.py::test[select-match_clause--Results] >> test.py::test[insert-replace_ordered_by_key_desc-default.txt-Results] >> test.py::test[schema-copy-yamred_dsv_raw-Results] >> test.py::test[type_v3-insert_struct_v3_with_native--Results] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-Results] >> test.py::test[hor_join-group_ranges--Results] >> test.py::test[blocks-add_int32--Results] >> test.py::test[optimizers-yql-9297_publish_ytcopy--ForceBlocks] >> test.py::test[file-file_list_simple--Results] >> test.py::test[union_all-mix_map_and_read-default.txt-Results] [GOOD] >> test.py::test[produce-process_lambda_opt_args-default.txt-Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename--Results] [GOOD] >> test.py::test[union_all-path_and_record-default.txt-ForceBlocks] >> 
test.py::test[produce-process_multi_in_single_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_multi_in_single_out--Results] [SKIPPED] >> test.py::test[produce-process_with_python_stream-empty-ForceBlocks] >> test.py::test[select-match_clause--Results] [GOOD] >> test.py::test[pg-tpcds-q54-default.txt-Results] [GOOD] >> test.py::test[join-no_empty_join_for_dyn-off-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-min_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-min_by-default.txt-Results] >> test.py::test[table_range-concat_sorted_max_tables--Results] [GOOD] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt-Results] [GOOD] >> test.py::test[schema-user_schema_mix1--Results] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Results] [GOOD] >> test.py::test[aggregate-aggregation_and_order-default.txt-Results] >> test.py::test[insert-replace_ordered_by_key_desc-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_common_type-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-replace_ordered_by_key_desc-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q18-default.txt-ForceBlocks] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-4.test] [GOOD] >> test.py::test[pg-tpcds-q18-default.txt-Results] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-Results] [GOOD] >> test.py::test[insert-trivial_literals-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint64--Results] [GOOD] >> test.py::test[select-one_labeled_column-default.txt-ForceBlocks] >> test.py::test[result_types-containers-default.txt-Results] [GOOD] >> test.py::test[join-star_join_semionly-off-ForceBlocks] >> test.py::test[insert_monotonic-to_empty--ForceBlocks] >> test.py::test[pg-tpcds-q57-default.txt-Results] >> test.py::test[join-no_empty_join_for_dyn-off-Results] [SKIPPED] >> test.py::test[join-premap_common_inner-off-ForceBlocks] >> test.py::test[tpch-q12-default.txt-Results] >> test.py::test[key_filter-mixed_opt_bounds--Results] [SKIPPED] >> test.py::test[key_filter-multiusage--Results] >> test.py::test[select-autoextract_source_value-default.txt-Results] >> test.py::test[bigdate-table_common_type-default.txt-Results] >> test.py::test[insert-select_operate_with_columns--Results] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--ForceBlocks] >> test.py::test[blocks-combine_all_max--Results] >> test.py::test[sampling-sort-default.txt-Results] >> test.py::test[join-star_join_semionly_premap-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_semionly_premap-off-Results] |85.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[join-mapjoin_early_rewrite_sequence--Results] [GOOD] |85.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[key_filter-between_with_key_filter--Results] [GOOD] >> test.py::test[join-full_join--ForceBlocks] [GOOD] >> test.py::test[join-full_join--Results] >> test.py::test[join-star_join_semionly_premap-off-Results] [SKIPPED] >> test.py::test[join-yql-12022--ForceBlocks] >> test.py::test[blocks-add_int32--Results] [GOOD] >> test.py::test[blocks-add_int8--Results] |85.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[insert-replace_ordered_by_key_desc-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_expr_simple--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_expr_simple--Results] >> test.py::test[schema-copy-yamred_dsv_raw-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-Results] >> test.py::test[blocks-add_uint64--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint64--Results] >> test.py::test[pg-tpcds-q18-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q21-default.txt-ForceBlocks] >> test.py::test[union_all-path_and_record-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-path_and_record-default.txt-Results] >> test.py::test[produce-process_with_python_stream-empty-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_python_stream-empty-Results] >> test.py::test[join-star_join_semionly-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_semionly-off-Results] >> test.py::test[aggr_factory-min_by-default.txt-Results] [GOOD] >> test.py::test[aggregate-avg_and_sum_float--ForceBlocks] >> test.py::test[join-star_join_semionly-off-Results] [SKIPPED] >> test.py::test[join-three_equalities_paren-off-ForceBlocks] >> test.py::test[window-full/session--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_opt--ForceBlocks] >> test.py::test[file-file_list_simple--Results] [GOOD] >> test.py::test[file-where_key_in_get_file_content--Results] >> test.py::test[type_v3-insert_struct_v3_with_native--Results] [GOOD] >> test.py::test[type_v3-insert_struct_v3_wo_native--Results] >> test.py::test[optimizers-sort_constraint_in_left--Results] [GOOD] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-Results] >> test.py::test[select-one_labeled_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-one_labeled_column-default.txt-Results] >> test.py::test[blocks-add_uint64--Results] [GOOD] >> test.py::test[blocks-combine_hashed_max--ForceBlocks] >> test.py::test[hor_join-group_ranges--Results] [GOOD] >> test.py::test[hor_join-yield_on-default.txt-Results] >> test.py::test[aggr_factory-avg_if-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype--Results] [GOOD] >> test.py::test[order_by-order_by_expr_simple--Results] [GOOD] >> test.py::test[order_by-sort_with_take_limit--ForceBlocks] >> test.py::test[aggr_factory-multi--Results] >> test.py::test[produce-process_with_python_stream-empty-Results] [GOOD] >> test.py::test[produce-process_with_udf_validate-default.txt-ForceBlocks] >> test.py::test[join-premap_common_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner-off-Results] [SKIPPED] >> test.py::test[pg-tpcds-q57-default.txt-Results] [GOOD] >> test.py::test[insert-select_operate_with_columns--Results] [GOOD] >> test.py::test[insert-trivial_select-default.txt-Results] >> test.py::test[join-premap_context_dep--ForceBlocks] >> test.py::test[join-full_join--Results] 
[GOOD] >> test.py::test[join-join_comp_map_table--ForceBlocks] >> test.py::test[select-autoextract_source_value-default.txt-Results] [GOOD] >> test.py::test[select-bit_ops-default.txt-Results] >> test.py::test[union_all-path_and_record-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_multiple-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q09-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q12-default.txt-Results] >> test.py::test[window-generic/session--ForceBlocks] [GOOD] >> test.py::test[window-generic/session--Results] >> test.py::test[bigdate-table_common_type-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-wo_compat-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-wo_compat-Results] [SKIPPED] >> test.py::test[blocks-combine_all_pg--ForceBlocks] >> test.py::test[select-one_labeled_column-default.txt-Results] [GOOD] >> test.py::test[tpch-q7-default.txt-ForceBlocks] |85.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[produce-reduce_multi_in_difftype--Results] [GOOD] >> test.py::test[insert_monotonic-to_empty--ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-to_empty--Results] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--Results] >> test.py::test[blocks-add_int8--Results] [GOOD] >> test.py::test[blocks-combine_all_count_filter--Results] >> test.py::test[window-distinct_over_window--Results] [GOOD] >> test.py::test[window-generic/aggregations_mixed_leadlag--Results] >> test.py::test[sampling-sort-default.txt-Results] [GOOD] >> test.py::test[sampling-system_sampling-io_block_size-Results] [SKIPPED] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-Results] [GOOD] >> test.py::test[schema-select_all-yamred_dsv_raw-Results] >> test.py::test[pg-tpcds-q21-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q21-default.txt-Results] |85.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[pg-tpcds-q57-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-9297_publish_ytcopy--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-9297_publish_ytcopy--Results] >> test.py::test[tpch-q12-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregation_and_order-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-Results] >> test.py::test[insert_monotonic-to_empty--Results] [GOOD] >> test.py::test[join-anyjoin_common_nodup-off-ForceBlocks] >> test.py::test[join-yql-12022--ForceBlocks] [GOOD] >> test.py::test[join-yql-12022--Results] >> test.py::test[tpch-q22-default.txt-Results] >> test.py::test[window-win_func_over_group_by_list_names--Results] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--Results] >> test.py::test[blocks-combine_all_max--Results] [GOOD] >> test.py::test[key_filter-multiusage--Results] [GOOD] >> test.py::test[blocks-combine_all_minmax_double--Results] >> test.py::test[key_filter-nile_pred--Results] |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-4.test] [GOOD] >> test.py::test[aggregate-group_by_cube_join_count--Results] [GOOD] |85.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> 
test.py::test[sampling-system_sampling-io_block_size-Results] [SKIPPED] >> test.py::test[aggregate-group_by_expr_dict--Results] >> test.py::test[file-where_key_in_get_file_content--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Results] >> test.py::test[pg-tpcds-q21-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q47-default.txt-ForceBlocks] >> test.py::test[join-three_equalities_paren-off-ForceBlocks] [GOOD] >> test.py::test[join-three_equalities_paren-off-Results] [SKIPPED] >> test.py::test[aggregate-avg_and_sum_float--ForceBlocks] [GOOD] >> test.py::test[aggregate-avg_and_sum_float--Results] >> test.py::test[optimizers-group_visit_lambdas--Results] >> test.py::test[type_v3-insert_struct_v3_wo_native--Results] [GOOD] >> test.py::test[udf-two_regexps--Results] >> test.py::test[insert-trivial_select-default.txt-Results] [GOOD] >> test.py::test[insert_monotonic-truncate_fail--Results] >> test.py::test[insert-anonymous_tables-default.txt-Results] >> test.py::test[action-eval_anon_table--Results] >> test.py::test[weak_field-weak_field_strict--Results] >> test.py::test[key_filter-utf8_with_legacy--ForceBlocks] >> test.py::test[blocks-sub_uint64_opt2--ForceBlocks] >> test.py::test[select-one_unlabeled_column-default.txt-Results] >> test.py::test[blocks-combine_hashed_max--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_max--Results] >> test.py::test[window-generic/session--Results] [GOOD] >> test.py::test[window-lagging/aggregations--ForceBlocks] >> test.py::test[select-bit_ops-default.txt-Results] [GOOD] >> test.py::test[select-calculated_values-default.txt-Results] >> test.py::test[optimizers-yql-9297_publish_ytcopy--Results] [GOOD] >> test.py::test[order_by-order_by_dot_column-default.txt-ForceBlocks] >> test.py::test[produce-process_with_udf_validate-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--Results] [GOOD] >> test.py::test[produce-process_with_udf_validate-default.txt-Results] >> test.py::test[aggregate-group_by_hop_distinct--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-group_by_hop_distinct--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_list_key--ForceBlocks] >> test.py::test[join-star_join_inners--Results] >> test.py::test[join-late_mergejoin_on_empty--Results] [SKIPPED] >> test.py::test[join-left_cast_to_string-off-Results] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_no_opt--Results] >> test.py::test[join-premap_context_dep--ForceBlocks] [GOOD] >> test.py::test[join-premap_context_dep--Results] >> test.py::test[hor_join-yield_on-default.txt-Results] [GOOD] >> test.py::test[in-in_ansi_join--Results] |85.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[join-three_equalities_paren-off-Results] [SKIPPED] |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[order_by-sort_with_take_limit--ForceBlocks] [GOOD] >> test.py::test[order_by-sort_with_take_limit--Results] >> test.py::test[schema-select_all-yamred_dsv_raw-Results] [GOOD] >> test.py::test[schema-select_all_inferschema--Results] >> test.py::test[union_all-union_all_multiple-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-yql-12022--Results] [GOOD] >> test.py::test[join-yql-14829_left--ForceBlocks] >> test.py::test[flatten_by-flatten_dict_by_opt--ForceBlocks] >> test.py::test[blocks-combine_all_count_filter--Results] [GOOD] >> 
test.py::test[blocks-combine_all_decimal_max-default.txt-Results] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-1.test] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] >> test.py::test[produce-reduce_with_python_row_repack--ForceBlocks] >> test.py::test[produce-reduce_with_python_row_repack--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_row_repack--Results] [SKIPPED] >> test.py::test[ql_filter-integer_many_right--ForceBlocks] >> test.py::test[produce-process_with_udf_validate-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap--ForceBlocks] >> test.py::test[aggregate-avg_and_sum_float--Results] [GOOD] >> test.py::test[insert_monotonic-truncate_fail--Results] [GOOD] >> test.py::test[join-cbo_7tables--Results] [SKIPPED] >> test.py::test[join-emptyjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-equi_join_by_expr--Results] |85.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[union_all-union_all_multiple-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_max--Results] [GOOD] >> test.py::test[blocks-combine_hashed_sum_many_keys--ForceBlocks] >> test.py::test[order_by-sort_with_take_limit--Results] [GOOD] >> test.py::test[pg-all_data--ForceBlocks] >> test.py::test[blocks-date_sub--Results] [GOOD] >> test.py::test[blocks-div_uint64--Results] |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[key_filter-nile_pred--Results] [GOOD] >> test.py::test[key_filter-range_union--Results] >> test.py::test[join-premap_context_dep--Results] [GOOD] >> test.py::test[join-premap_context_dep-off-ForceBlocks] >> test.py::test[blocks-combine_all_minmax_double--Results] [GOOD] >> test.py::test[blocks-combine_hashed_set--Results] |85.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[aggregate-avg_and_sum_float--Results] [GOOD] >> test.py::test[tpch-q7-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q12-default.txt-Results] [GOOD] >> test.py::test[window-win_func_lead_lag_opt--ForceBlocks] [GOOD] >> test.py::test[window-win_func_lead_lag_opt--Results] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt-Results] >> test.py::test[tpch-q7-default.txt-Results] >> test.py::test[pg-tpch-q14-default.txt-Results] >> test.py::test[action-eval_anon_table--Results] [GOOD] >> test.py::test[action-eval_folder_via_file--Results] >> test.py::test[blocks-combine_all_pg--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_pg--Results] >> test.py::test[join-join_comp_map_table--ForceBlocks] [GOOD] >> test.py::test[join-join_comp_map_table--Results] >> test.py::test[select-one_unlabeled_column-default.txt-Results] [GOOD] >> test.py::test[select-select_all_from_concat_anon-default.txt-Results] >> test.py::test[weak_field-weak_field_strict--Results] [GOOD] >> test.py::test[weak_field-weak_field_type-default.txt-Results] >> test.py::test[udf-two_regexps--Results] [GOOD] >> test.py::test[view-system_udf--Results] |85.2%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part1/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[blocks-sub_uint64_opt2--ForceBlocks] [GOOD] >> test.py::test[blocks-sub_uint64_opt2--Results] >> test.py::test[insert-anonymous_tables-default.txt-Results] [GOOD] >> test.py::test[insert-fail_read_view_after_modify--Results] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_minmax_nested--Results] >> test.py::test[schema-select_all_inferschema--Results] [GOOD] >> test.py::test[schema-select_reordered-default.txt-Results] >> test.py::test[select-calculated_values-default.txt-Results] [GOOD] >> test.py::test[select-dict_lookup_by_key-default.txt-Results] >> test.py::test[aggregate-group_by_hop_list_key--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_dot_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_dot_column-default.txt-Results] >> test.py::test[aggregate-group_by_hop_list_key--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_zero_delay--ForceBlocks] >> test.py::test[pg-tpcds-q47-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_common_nodup-off-ForceBlocks] [GOOD] >> test.py::test[optimizers-group_visit_lambdas--Results] [GOOD] >> test.py::test[optimizers-pushdown_nonsep_over_aggregate--Results] [SKIPPED] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Results] >> test.py::test[join-anyjoin_common_nodup-off-Results] [SKIPPED] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] [GOOD] >> test.py::test[ql_filter-integer_many_right--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_many_right--Results] >> test.py::test[aggregate-group_by_expr_dict--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_lookup--Results] >> test.py::test[join-yql-14829_left--ForceBlocks] [GOOD] >> test.py::test[join-yql-14829_left--Results] >> test.py::test[blocks-sub_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-type_and_callable_stats--ForceBlocks] >> test.py::test[tpch-q7-default.txt-Results] [GOOD] >> test.py::test[udf-regexp_udf--ForceBlocks] >> test.py::test[flatten_by-flatten_dict_by_opt--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_dict_by_opt--Results] >> test.py::test[pg-tpcds-q17-default.txt-ForceBlocks] >> test.py::test[blocks-div_uint64--Results] [GOOD] >> test.py::test[blocks-exists--Results] |85.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[join-anyjoin_common_nodup-off-Results] [SKIPPED] >> test.py::test[window-generic/aggregations_mixed_leadlag--Results] [GOOD] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-Results] >> test.py::test[insert-fail_read_view_after_modify--Results] [GOOD] |85.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[pg-tpcds-q47-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-keepmeta-with_view-Results] [SKIPPED] >> test.py::test[insert-override-proto-Results] >> test.py::test[window-win_func_lead_lag_opt--Results] [GOOD] >> test.py::test[window-win_func_over_group_by_compl--ForceBlocks] |85.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] [GOOD] >> test.py::test[key_filter-utf8_with_legacy--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_dot_column-default.txt-Results] [GOOD] >> test.py::test[key_filter-utf8_with_legacy--Results] |85.2%| [TA] 
$(B)/ydb/library/yql/tests/sql/dq_file/part14/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[action-eval_folder_via_file--Results] [GOOD] >> test.py::test[action-eval_pragma--Results] >> test.py::test[udf-named_args_for_script_with_posargs--ForceBlocks] >> test.py::test[join-left_join_right_pushdown_no_opt--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_simple--Results] >> test.py::test[join-left_semi_with_other-off-ForceBlocks] >> test.py::test[ql_filter-integer_many_right--Results] [GOOD] >> test.py::test[ql_filter-integer_members--ForceBlocks] >> test.py::test[select-select_all_from_concat_anon-default.txt-Results] [GOOD] >> test.py::test[select-struct_members-default.txt-Results] >> test.py::test[join-premap_context_dep-off-ForceBlocks] [GOOD] >> test.py::test[window-lagging/aggregations--ForceBlocks] [GOOD] >> test.py::test[window-lagging/aggregations--Results] >> test.py::test[join-premap_context_dep-off-Results] [SKIPPED] >> test.py::test[join-pullup_cross-off-ForceBlocks] >> test.py::test[tpch-q22-default.txt-Results] [GOOD] >> test.py::test[tpch-q3-default.txt-Results] >> test.py::test[weak_field-weak_field_type-default.txt-Results] [GOOD] >> test.py::test[window-full/aggregations_leadlag--Results] >> test.py::test[blocks-combine_hashed_set--Results] [GOOD] >> test.py::test[blocks-combine_hashed_some--Results] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap--Results] >> test.py::test[blocks-combine_all_pg--Results] [GOOD] >> test.py::test[blocks-date_add_interval_scalar--ForceBlocks] >> test.py::test[view-system_udf--Results] [GOOD] >> test.py::test[weak_field-weak_field_aggregation--Results] >> test.py::test[join-equi_join_by_expr--Results] [GOOD] >> test.py::test[join-equi_join_by_expr-off-Results] [SKIPPED] >> test.py::test[join-filter_joined--Results] |85.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[order_by-order_by_dot_column-default.txt-Results] [GOOD] >> test.py::test[join-join_comp_map_table--Results] [GOOD] >> test.py::test[join-join_comp_map_table-off-ForceBlocks] >> test.py::test[schema-select_reordered-default.txt-Results] [GOOD] >> test.py::test[schema-select_yamr_fields--Results] >> test.py::test[pg-tpch-q14-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-Results] >> test.py::test[pg-all_data--ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners--Results] [GOOD] >> test.py::test[pg-all_data--Results] >> test.py::test[join-star_join_mirror-off-Results] [SKIPPED] >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-Results] >> test.py::test[join-yql-8131--Results] [SKIPPED] >> test.py::test[key_filter-contains_tuples-default.txt-Results] >> test.py::test[flatten_by-flatten_dict_by_opt--Results] [GOOD] >> test.py::test[flatten_by-flatten_expr_join--ForceBlocks] [SKIPPED] >> test.py::test[flatten_by-flatten_expr_join--Results] [SKIPPED] >> test.py::test[flatten_by-flatten_mode-default.txt-ForceBlocks] >> test.py::test[select-dict_lookup_by_key-default.txt-Results] [GOOD] >> test.py::test[select-exists_false-default.txt-Results] >> test.py::test[pg-tpcds-q24-default.txt-ForceBlocks] >> test.py::test[ql_filter-integer_optional--ForceBlocks] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-Results] [GOOD] >> 
test.py::test[aggregate-group_by_cube_duo--Results] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--Results] [GOOD] >> test.py::test[window-win_func_rank_with_order_by_aggr_key--Results] >> test.py::test[key_filter-utf8_with_legacy--Results] [GOOD] >> test.py::test[key_filter-range_union--Results] [GOOD] >> test.py::test[limit-insert_with_limit-dynamic-Results] >> test.py::test[join-star_join--ForceBlocks] >> test.py::test[lineage-window_tablerow-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-test_lmap_opts--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-test_lmap_opts--Results] [SKIPPED] >> test.py::test[order_by-literal_with_assume--ForceBlocks] >> test.py::test[aggregate-group_by_hop_zero_delay--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_zero_delay--Results] [SKIPPED] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column--ForceBlocks] >> test.py::test[udf-regexp_udf--ForceBlocks] [GOOD] >> test.py::test[insert-override-proto-Results] [GOOD] >> test.py::test[insert-replace_inferred_op--Results] >> test.py::test[udf-regexp_udf--Results] >> test.py::test[action-eval_pragma--Results] [GOOD] >> test.py::test[action-subquery_merge_nested_subquery--Results] >> test.py::test[blocks-combine_all_minmax_nested--Results] [GOOD] >> test.py::test[blocks-combine_all_sum_filter_opt--Results] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap--Results] [GOOD] >> test.py::test[produce-reduce_typeinfo--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_typeinfo--Results] [SKIPPED] >> test.py::test[produce-reduce_with_trivial_remaps--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_trivial_remaps--Results] [SKIPPED] >> test.py::test[sampling-subquery_limit-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q17-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q17-default.txt-Results] >> test.py::test[blocks-exists--Results] [GOOD] >> test.py::test[blocks-filter_by_column_with_drop--Results] >> test.py::test[join-yql-14829_left--Results] [GOOD] >> test.py::test[join-yql_465--ForceBlocks] >> test.py::test[pg-all_data--Results] [GOOD] >> test.py::test[window-lagging/aggregations--Results] [GOOD] >> test.py::test[window-row_number_no_part_multi_input-default.txt-ForceBlocks] >> test.py::test[blocks-type_and_callable_stats--ForceBlocks] [GOOD] >> test.py::test[blocks-type_and_callable_stats--Results] >> test.py::test[blocks-combine_hashed_sum_many_keys--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_sum_many_keys--Results] >> test.py::test[aggregate-group_by_expr_lookup--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_with_where-default.txt-Results] >> test.py::test[schema-select_yamr_fields--Results] [GOOD] >> test.py::test[select-append_to_value_1000--Results] [SKIPPED] >> test.py::test[udf-named_args_for_script_with_posargs--ForceBlocks] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs--Results] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt-Results] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-2171_aggregate_desc_sort_and_extract--Results] [SKIPPED] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Results] >> test.py::test[ql_filter-integer_members--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_members--Results] >> test.py::test[select-struct_members-default.txt-Results] [GOOD] >> 
test.py::test[select-substring_v1-default.txt-Results] >> test.py::test[udf-regexp_udf--Results] [GOOD] >> test.py::test[union_all-union_all_multiin--ForceBlocks] |85.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[pg-all_data--Results] [GOOD] >> test.py::test[pg-tpcds-q17-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q40-default.txt-ForceBlocks] >> test.py::test[join-left_semi_with_other-off-ForceBlocks] [GOOD] >> test.py::test[join-left_semi_with_other-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_csee-off-ForceBlocks] |85.2%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part1/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Results] >> test.py::test[join-pullup_cross-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_cross-off-Results] [SKIPPED] >> test.py::test[join-pullup_left_semi-off-ForceBlocks] >> test.py::test[blocks-type_and_callable_stats--Results] [GOOD] >> test.py::test[column_group-hint_append2--ForceBlocks] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-Results] [GOOD] >> test.py::test[ypath-empty_range-dynamic-Results] [SKIPPED] >> test.py::test[column_group-hint_append2--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_append2--Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail5--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail5--Results] [SKIPPED] >> test.py::test[column_group-length-single-ForceBlocks] [SKIPPED] >> test.py::test[column_group-length-single-Results] [SKIPPED] >> test.py::test[column_order-select_plain-default.txt-ForceBlocks] >> test.py::test[join-left_join_right_pushdown_simple--Results] [GOOD] >> test.py::test[join-lookupjoin_bug7646_csee--Results] >> test.py::test[udf-named_args_for_script_with_posargs--Results] [GOOD] >> test.py::test[udf-two_regexps--ForceBlocks] >> test.py::test[select-exists_false-default.txt-Results] [GOOD] >> test.py::test[select-exists_true-default.txt-Results] >> test.py::test[pg-tpcds-q24-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q24-default.txt-Results] >> test.py::test[flatten_by-flatten_mode-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_mode-default.txt-Results] >> test.py::test[ql_filter-integer_members--Results] [GOOD] >> test.py::test[schema-insert-row_spec-ForceBlocks] >> test.py::test[weak_field-weak_field_aggregation--Results] [GOOD] >> test.py::test[ql_filter-integer_optional--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_optional--Results] >> test.py::test[blocks-combine_hashed_some--Results] [GOOD] >> test.py::test[blocks-date_greater--Results] >> test.py::test[weak_field-weak_field_esc_yson--Results] >> test.py::test[select-exists_true-default.txt-ForceBlocks] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Results] >> test.py::test[join-filter_joined--Results] [GOOD] >> test.py::test[join-flatten_columns2-off-Results] [SKIPPED] >> test.py::test[join-from_in_front_join--Results] >> test.py::test[limit-insert_with_limit-dynamic-Results] [GOOD] >> test.py::test[limit-limit_offset-default.txt-Results] >> test.py::test[action-subquery_merge_nested_subquery--Results] [GOOD] >> test.py::test[action-table_content_before_from_folder--Results] >> test.py::test[action-eval_for-default.txt-ForceBlocks] >> 
test.py::test[insert-replace_inferred_op--Results] [GOOD] >> test.py::test[insert-select_after_replace-default.txt-Results] >> test.py::test[order_by-literal_with_assume--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_with_assume--Results] |85.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[ypath-empty_range-dynamic-Results] [SKIPPED] >> test.py::test[tpch-q3-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-subquery_limit-default.txt-Results] >> test.py::test[blocks-filter_by_column_with_drop--Results] [GOOD] >> test.py::test[window-win_func_over_group_by_compl--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_add_date_scalar--Results] >> test.py::test[window-win_func_over_group_by_compl--Results] >> test.py::test[pg-tpcds-q24-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q37-default.txt-ForceBlocks] >> test.py::test[flatten_by-flatten_mode-default.txt-Results] [GOOD] >> test.py::test[in-in_compact_distinct-empty-ForceBlocks] >> test.py::test[aggregate-group_by_ru_with_select_distinct--Results] >> test.py::test[blocks-date_add_interval_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_add_interval_scalar--Results] >> test.py::test[ql_filter-integer_optional--Results] [GOOD] >> test.py::test[result_types-singular-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_sum_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_double--Results] >> test.py::test[join-yql_465--ForceBlocks] [GOOD] >> test.py::test[join-yql_465--Results] >> test.py::test[key_filter-contains_tuples-default.txt-Results] [GOOD] >> test.py::test[key_filter-empty_range--Results] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column--Results] >> test.py::test[order_by-literal_with_assume--Results] [GOOD] >> test.py::test[order_by-sort_decimals--ForceBlocks] |85.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[tpch-q3-default.txt-Results] [GOOD] >> test.py::test[select-substring_v1-default.txt-Results] [GOOD] >> test.py::test[select-table_content_from_sort_desc-default.txt-Results] >> test.py::test[join-join_comp_map_table-off-ForceBlocks] [GOOD] >> test.py::test[join-join_comp_map_table-off-Results] [SKIPPED] >> test.py::test[join-join_without_correlation_names-off-ForceBlocks] >> test.py::test[join-star_join--ForceBlocks] [GOOD] >> test.py::test[join-star_join--Results] >> test.py::test[pg-tpcds-q40-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q40-default.txt-Results] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Results] [GOOD] >> test.py::test[hor_join-double_input-default.txt-Results] >> test.py::test[sampling-subquery_limit-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_with_where-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-Results] |85.3%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part14/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[union_all-union_all_multiin--ForceBlocks] [GOOD] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_multiin--Results] >> test.py::test[select-create_structures-default.txt-Results] >> test.py::test[join-yql_465--Results] [GOOD] >> test.py::test[limit-insert_with_limit-dynamic-ForceBlocks] >> test.py::test[select-exists_true-default.txt-Results] [GOOD] >> test.py::test[select-sampleselect-1000-Results] >> test.py::test[blocks-combine_hashed_sum_many_keys--Results] [GOOD] >> test.py::test[blocks-distinct_mixed_keys--ForceBlocks] >> test.py::test[window-win_func_rank_with_order_by_aggr_key--Results] [GOOD] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Results] >> test.py::test[join-pullup_left_semi-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_left_semi-off-Results] [SKIPPED] >> test.py::test[pg-tpcds-q40-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q79-default.txt-ForceBlocks] |85.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[sampling-subquery_limit-default.txt-Results] [GOOD] >> test.py::test[window-row_number_no_part_multi_input-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-date_add_interval_scalar--Results] [GOOD] >> test.py::test[window-row_number_no_part_multi_input-default.txt-Results] >> test.py::test[blocks-if--ForceBlocks] >> test.py::test[udf-two_regexps--ForceBlocks] [GOOD] >> test.py::test[udf-two_regexps--Results] >> test.py::test[select-exists_true-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-exists_true-default.txt-Results] >> test.py::test[join-lookupjoin_bug7646_csee--Results] [GOOD] >> test.py::test[insert-select_after_replace-default.txt-Results] [GOOD] >> test.py::test[insert-select_after_replace_unwrap-default.txt-Results] >> test.py::test[join-lookupjoin_bug7646_csee-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_bug7646_csee-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o--ForceBlocks] >> test.py::test[weak_field-weak_field_esc_yson--Results] [GOOD] >> test.py::test[weak_field-weak_field_rest--Results] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Results] [GOOD] >> test.py::test[view-file_inner_udf--ForceBlocks] [SKIPPED] >> test.py::test[view-file_inner_udf--Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_subq--Results] >> test.py::test[weak_field-weak_field_join--ForceBlocks] >> test.py::test[union_all-union_all_multiin--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_combine--ForceBlocks] >> test.py::test[limit-limit_offset-default.txt-Results] [GOOD] >> test.py::test[limit-yql-8611_calc_peephole--Results] |85.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[join-pullup_left_semi-off-Results] [SKIPPED] >> test.py::test[schema-insert-row_spec-ForceBlocks] [GOOD] >> test.py::test[schema-insert-row_spec-Results] >> test.py::test[action-table_content_before_from_folder--Results] [GOOD] >> test.py::test[aggr_factory-boolor-default.txt-Results] >> test.py::test[action-eval_for-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_for-default.txt-Results] >> test.py::test[join-from_in_front_join--Results] [GOOD] >> test.py::test[join-from_in_front_join-off-Results] [SKIPPED] >> test.py::test[join-full_join-off-Results] [SKIPPED] >> 
test.py::test[join-grace_join1-grace-Results] [SKIPPED] >> test.py::test[join-group_compact_by--Results] >> test.py::test[pg-tpcds-q37-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q37-default.txt-Results] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column--Results] [GOOD] >> test.py::test[aggregate-group_by_session_nopush--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-group_by_session_nopush--Results] [SKIPPED] >> test.py::test[result_types-singular-default.txt-ForceBlocks] [GOOD] >> test.py::test[result_types-singular-default.txt-Results] >> test.py::test[select-exists_true-default.txt-Results] [GOOD] >> test.py::test[select-literal_bool-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_with_different_key_names--ForceBlocks] >> test.py::test[window-win_func_over_group_by_compl--Results] [GOOD] >> test.py::test[window-win_func_part_by_expr--ForceBlocks] >> test.py::test[udf-two_regexps--Results] [GOOD] >> test.py::test[udf-udaf--ForceBlocks] >> test.py::test[in-in_compact_distinct-empty-ForceBlocks] [GOOD] >> test.py::test[in-in_compact_distinct-empty-Results] >> test.py::test[column_order-select_plain-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_plain-default.txt-Results] >> test.py::test[pg-tpch-q18-default.txt-Results] [GOOD] >> test.py::test[produce-process_multi_out--Results] [SKIPPED] >> test.py::test[produce-process_rows_and_filter--Results] [SKIPPED] >> test.py::test[produce-process_with_assume--Results] >> test.py::test[join-star_join--Results] [GOOD] >> test.py::test[join-star_join_multi--ForceBlocks] >> test.py::test[action-eval_for-default.txt-Results] [GOOD] >> test.py::test[action-eval_sample--ForceBlocks] >> test.py::test[pg-tpcds-q37-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q51-default.txt-ForceBlocks] >> test.py::test[schema-insert-row_spec-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_extra_sort-ForceBlocks] |85.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[aggregate-group_by_session_nopush--Results] [SKIPPED] >> test.py::test[aggregate-group_by_cube_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_order_by_expr--Results] >> test.py::test[blocks-combine_hashed_minmax_double--Results] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_nested--Results] >> test.py::test[result_types-singular-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_topsort-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-6133_skip_deps--Results] [SKIPPED] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--Results] >> test.py::test[select-create_structures-default.txt-Results] [GOOD] >> test.py::test[hor_join-double_input-default.txt-Results] [GOOD] >> test.py::test[hor_join-empty_out_hor_join-default.txt-Results] >> test.py::test[aggregate-group_by_ru_with_select_distinct--Results] [GOOD] >> test.py::test[aggregate-table_funcs_group_by-default.txt-ForceBlocks] >> test.py::test[window-row_number_no_part_multi_input-default.txt-Results] [GOOD] >> test.py::test[window-win_func_auto_arg-default.txt-ForceBlocks] >> test.py::test[join-join_without_correlation_names-off-ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_names-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_not_selected--ForceBlocks] >> test.py::test[select-table_content_from_sort_desc-default.txt-Results] [GOOD] >> 
test.py::test[select-where_with_lambda--Results] >> test.py::test[in-in_compact_distinct-empty-Results] [GOOD] >> test.py::test[insert-append_sorted--ForceBlocks] >> test.py::test[pg-tpcds-q79-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q79-default.txt-Results] |85.3%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part4/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[select-sampleselect-1000-Results] [GOOD] >> test.py::test[select-select_all_filtered-default.txt-Results] >> test.py::test[order_by-sort_decimals--ForceBlocks] [GOOD] >> test.py::test[limit-insert_with_limit-dynamic-ForceBlocks] [GOOD] >> test.py::test[limit-insert_with_limit-dynamic-Results] >> test.py::test[order_by-sort_decimals--Results] |85.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[select-create_structures-default.txt-Results] [GOOD] >> test.py::test[blocks-interval_add_date_scalar--Results] [GOOD] >> test.py::test[blocks-interval_mul_scalar--Results] >> test.py::test[aggregate-having_cast-default.txt-ForceBlocks] >> test.py::test[column_order-select_plain-default.txt-Results] [GOOD] >> test.py::test[count-count_all-default.txt-ForceBlocks] >> test.py::test[blocks-if--ForceBlocks] [GOOD] >> test.py::test[blocks-if--Results] >> test.py::test[insert-select_after_replace_unwrap-default.txt-Results] [GOOD] >> test.py::test[insert-use_anon_table_before_commit_fail--Results] >> test.py::test[action-action_eval_cluster_use_compact_named_exprs--ForceBlocks] [SKIPPED] >> test.py::test[action-action_eval_cluster_use_compact_named_exprs--Results] >> test.py::test[blocks-distinct_mixed_keys--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_mixed_keys--Results] >> test.py::test[pg-tpcds-q79-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q80-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_rest--Results] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Results] >> test.py::test[action-action_eval_cluster_use_compact_named_exprs--Results] [SKIPPED] >> test.py::test[action-eval_pragma--ForceBlocks] >> test.py::test[weak_field-weak_field_join--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_join--Results] >> test.py::test[weak_field-optimize_weak_fields_combine--ForceBlocks] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_combine--Results] >> test.py::test[limit-insert_with_limit-dynamic-Results] [GOOD] >> test.py::test[limit-many_top_sorts-default.txt-ForceBlocks] >> test.py::test[select-literal_bool-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-literal_bool-default.txt-Results] >> test.py::test[join-lookupjoin_semi_subq--Results] [GOOD] >> test.py::test[join-mapjoin_on_tablerecord--Results] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_read--Results] [SKIPPED] >> test.py::test[join-mergejoin_choose_primary_with_retry--Results] >> test.py::test[join-lookupjoin_inner_2o--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_2o--Results] >> test.py::test[produce-process_with_assume--Results] [GOOD] >> test.py::test[produce-process_with_lambda-default.txt-Results] >> test.py::test[aggr_factory-multi--Results] [GOOD] >> test.py::test[aggr_factory-top-default.txt-Results] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Results] [GOOD] >> test.py::test[window-win_with_cur_row--Results] >> test.py::test[order_by-sort_decimals--Results] [GOOD] >> test.py::test[schema-select_all-row_spec_extra_sort-ForceBlocks] [GOOD] >> 
test.py::test[schema-select_all-row_spec_extra_sort-Results] >> test.py::test[order_by-sort_simple--ForceBlocks] >> test.py::test[join-selfjoin_on_sorted_with_filter-off-ForceBlocks] >> test.py::test[blocks-if--Results] [GOOD] >> test.py::test[blocks-interval_add_date--ForceBlocks] >> test.py::test[insert-use_anon_table_before_commit_fail--Results] [GOOD] >> test.py::test[insert_monotonic-non_existing_fail--Results] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_only_distinct--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Results] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] >> test.py::test[udf-udaf--ForceBlocks] [GOOD] >> test.py::test[udf-udaf--Results] >> test.py::test[window-full/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-full/leadlag--Results] >> test.py::test[action-eval_sample--ForceBlocks] [GOOD] >> test.py::test[action-eval_sample--Results] >> test.py::test[pg-tpcds-q51-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q51-default.txt-Results] >> test.py::test[select-literal_bool-default.txt-Results] [GOOD] >> test.py::test[select-sampleselect--ForceBlocks] |85.3%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part4/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[join-mergejoin_with_different_key_names--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names--Results] >> test.py::test[select-where_with_lambda--Results] [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Results] >> test_row_dispatcher.py::TestPqRowDispatcher::test_group_by_hop_restart_node [GOOD] >> test.py::test[sampling-bind_topsort-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_topsort-default.txt-Results] >> test.py::test[weak_field-optimize_weak_fields_combine--Results] [GOOD] >> test.py::test[window-distinct_over_window_full_frames--ForceBlocks] >> test.py::test[join-group_compact_by--Results] [GOOD] >> test.py::test[join-inner_with_order--Results] >> test.py::test[aggregate-group_by_expr_order_by_expr--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_grouping--Results] >> test.py::test[aggr_factory-boolor-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-max_by-default.txt-Results] >> test.py::test[join-lookupjoin_not_selected--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_not_selected--Results] >> test.py::test[select-select_all_filtered-default.txt-Results] [GOOD] >> test.py::test[select-select_concrete_detailed_columns-default.txt-Results] >> test.py::test[aggregate-table_funcs_group_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-table_funcs_group_by-default.txt-Results] >> test.py::test[weak_field-weak_field_join--Results] [GOOD] >> test.py::test[weak_field-weak_field_join_condition--ForceBlocks] >> test.py::test[schema-select_all-row_spec_extra_sort-Results] [GOOD] >> test.py::test[schema-user_schema_directread-default.txt-ForceBlocks] >> test.py::test[join-join_left_cbo--ForceBlocks] >> test.py::test[window-win_func_part_by_expr--ForceBlocks] [GOOD] >> test.py::test[window-win_func_part_by_expr--Results] >> test.py::test[key_filter-empty_range--Results] [GOOD] >> test.py::test[key_filter-key_double_opt_suffix--Results] [SKIPPED] >> test.py::test[key_filter-lambda_with_null_filter--Results] >> test.py::test[join-lookupjoin_inner_2o--Results] [GOOD] >> 
test.py::test[join-lookupjoin_inner_empty_subq-off-ForceBlocks] >> test.py::test[weak_field-weak_field_to_yson--Results] [GOOD] >> test.py::test[window-full/session--Results] >> test.py::test[pg-tpcds-q51-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q59-default.txt-ForceBlocks] >> test.py::test[count-count_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-non_existing_fail--Results] [GOOD] >> test.py::test[join-alias_where_group--Results] >> test.py::test[action-eval_sample--Results] [GOOD] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-ForceBlocks] >> test.py::test[count-count_all-default.txt-Results] >> test.py::test[action-eval_pragma--ForceBlocks] [GOOD] >> test.py::test[action-eval_pragma--Results] >> test.py::test[join-star_join_multi--ForceBlocks] [GOOD] >> test.py::test[join-star_join_multi--Results] >> test.py::test[blocks-distinct_mixed_keys--Results] [GOOD] >> test.py::test[udf-udaf--Results] [GOOD] >> test.py::test[view-system_udf--ForceBlocks] >> test.py::test[sampling-bind_topsort-default.txt-Results] [GOOD] >> test.py::test[sampling-map-keyfilter-ForceBlocks] >> test.py::test[blocks-combine_hashed_minmax_nested--Results] [GOOD] >> test.py::test[blocks-compare--Results] >> test.py::test[aggregate-having_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-having_cast-default.txt-Results] >> test.py::test[pg-tpcds-q46-default.txt-ForceBlocks] >> test.py::test[blocks-date_greater--Results] [GOOD] >> test.py::test[blocks-date_not_equals--Results] >> test.py::test[hor_join-empty_out_hor_join-default.txt-Results] [GOOD] >> test.py::test[hor_join-fuse_multi_usage-outlimit-Results] [SKIPPED] >> test.py::test[hor_join-out_table_record-default.txt-Results] >> test.py::test[aggregate-table_funcs_group_by-default.txt-Results] [GOOD] >> test.py::test[binding-table_regexp_binding--ForceBlocks] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--Results] [GOOD] >> test.py::test[order_by-SortByOneFieldDesc--Results] >> test.py::test[pg-tpcds-q80-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q80-default.txt-Results] >> test.py::test[produce-process_with_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-Results] >> test.py::test[join-lookupjoin_not_selected--Results] [GOOD] >> test.py::test[join-mapjoin_on_very_complex_type--ForceBlocks] >> test.py::test[blocks-interval_mul_scalar--Results] [GOOD] >> test.py::test[column_group-hint_anon_groups-single-Results] [SKIPPED] >> test.py::test[count-count_all_view_concat--Results] >> test.py::test[count-count_all-default.txt-Results] [GOOD] >> test.py::test[count-count_by_nulls--ForceBlocks] >> test.py::test[window-win_func_auto_arg-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_auto_arg-default.txt-Results] >> test.py::test[action-eval_pragma--Results] [GOOD] >> test.py::test[action-eval_range--ForceBlocks] >> test.py::test[window-win_func_part_by_expr--Results] [GOOD] >> test.py::test[window-win_func_with_struct_access-default.txt-ForceBlocks] >> test.py::test[limit-many_top_sorts-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-many_top_sorts-default.txt-Results] |85.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[blocks-distinct_mixed_keys--Results] [GOOD] >> test.py::test[limit-yql-8611_calc_peephole--Results] [GOOD] >> test.py::test[lineage-group_by_asstruct_key-default.txt-Results] [SKIPPED] >> 
test.py::test[lineage-list_literal2-default.txt-Results] [SKIPPED] >> test.py::test[lineage-nested_lambda_fields-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_all_filter-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-combinebykey_fields_subset--Results] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Results] >> test.py::test[select-sampleselect--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_duo_opt--ForceBlocks] >> test.py::test[select-select_concrete_detailed_columns-default.txt-Results] [GOOD] >> test.py::test[select-sampleselect--Results] >> test.py::test[select-shift_columns-default.txt-Results] >> test.py::test[join-mergejoin_choose_primary_with_retry--Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_semi_to_inner-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nested-off-Results] [SKIPPED] >> test.py::test[join-no_empty_join_for_dyn-off-Results] [SKIPPED] >> test.py::test[join-opt_on_opt_side_with_group-off-Results] [SKIPPED] >> test.py::test[join-premap_common_cross-off-Results] [SKIPPED] >> test.py::test[join-premap_no_premap-off-Results] [SKIPPED] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Results] >> test.py::test[join-mergejoin_with_different_key_names--Results] [GOOD] >> test.py::test[join-premap_common_right_tablecontent--ForceBlocks] >> test.py::test[join-selfjoin_on_sorted_with_filter-off-ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter-off-Results] >> test.py::test[join-selfjoin_on_sorted_with_filter-off-Results] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate-ForceBlocks] >> test.py::test[pg-tpcds-q80-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q84-default.txt-ForceBlocks] >> test.py::test[aggregate-having_cast-default.txt-Results] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-ForceBlocks] >> test.py::test[schema-user_schema_directread-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_directread-default.txt-Results] >> test.py::test[insert-append_sorted--ForceBlocks] [GOOD] >> test.py::test[insert-append_sorted--Results] >> test.py::test[in-in_ansi_join--Results] [GOOD] >> test.py::test[in-in_enum_single1-default.txt-Results] >> test.py::test[join-join_left_cbo--ForceBlocks] [GOOD] >> test.py::test[join-join_left_cbo--Results] >> test.py::test[weak_field-weak_field_join_condition--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_join_condition--Results] >> test.py::test[order_by-sort_simple--ForceBlocks] [GOOD] >> test.py::test[window-win_with_cur_row--Results] [GOOD] >> test.py::test[ypath-direct_read_from_dynamic--Results] >> test.py::test[join-lookupjoin_inner_empty_subq-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[limit-many_top_sorts-default.txt-Results] [GOOD] >> test.py::test[join-lookupjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-opt_on_opt_side-off-ForceBlocks] >> test.py::test[limit-sort_calc_limit--ForceBlocks] >> test.py::test[select-sampleselect--Results] [GOOD] >> 
test.py::test[select-simple_struct_field_access--ForceBlocks] >> test.py::test[order_by-sort_simple--Results] >> test.py::test[blocks-interval_add_date--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_add_date--Results] >> test.py::test[pg-tpcds-q59-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q59-default.txt-Results] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-Results] >> test.py::test[join-inner_with_order--Results] [GOOD] >> test.py::test[join-inner_with_order-off-Results] >> test.py::test[view-system_udf--ForceBlocks] [GOOD] >> test.py::test[view-system_udf--Results] >> test.py::test[pg-tpcds-q46-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q46-default.txt-Results] >> test.py::test[join-inner_with_order-off-Results] [SKIPPED] >> test.py::test[window-win_func_auto_arg-default.txt-Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--ForceBlocks] >> test.py::test[count-count_all_view_concat--Results] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] >> test.py::test[schema-user_schema_directread-default.txt-Results] [GOOD] >> test.py::test[select-hits_count--ForceBlocks] >> test.py::test[insert-append_sorted--Results] [GOOD] >> test.py::test[insert-keepmeta_nonstrict_fail--ForceBlocks] >> test.py::test[order_by-SortByOneFieldDesc--Results] [GOOD] >> test.py::test[order_by-assume_over_input_desc--Results] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_list_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_all_with_python_input_stream-dq_fail-Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap--Results] >> test.py::test[order_by-assume_over_input_desc--Results] [SKIPPED] >> test.py::test[order_by-limit--Results] |85.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[join-inner_with_order-off-Results] [SKIPPED] >> test.py::test[hor_join-out_table_record-default.txt-Results] [GOOD] >> test.py::test[hor_join-runtime_dep-default.txt-Results] >> test.py::test[aggregate-group_by_gs_grouping--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum--Results] >> test.py::test[sampling-map-keyfilter-ForceBlocks] [GOOD] >> test.py::test[sampling-map-keyfilter-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-Results] >> test.py::test[aggr_factory-top-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_udf_nested--Results] >> test.py::test[pg-tpcds-q59-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q91-default.txt-ForceBlocks] >> test.py::test[key_filter-lambda_with_null_filter--Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Results] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-list--ForceBlocks] >> test.py::test[binding-table_regexp_binding--ForceBlocks] [GOOD] >> test.py::test[binding-table_regexp_binding--Results] >> test.py::test[join-join_left_cbo--Results] [GOOD] >> test.py::test[join-join_right_cbo--ForceBlocks] >> test.py::test[pg-tpcds-q46-default.txt-Results] [GOOD] >> 
test.py::test[pg-tpcds-q55-default.txt-ForceBlocks] >> test.py::test[count-count_by_nulls--ForceBlocks] [GOOD] >> test.py::test[count-count_by_nulls--Results] >> test.py::test[aggr_factory-max_by-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-min-default.txt-Results] >> test.py::test[view-system_udf--Results] [GOOD] >> test.py::test[weak_field-weak_field--ForceBlocks] >> test.py::test[blocks-compare--Results] [GOOD] >> test.py::test[blocks-date_add_interval_scalar--Results] >> test.py::test[order_by-sort_simple--Results] [GOOD] >> test.py::test[pg-in_mixed--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] [GOOD] >> test.py::test[select-shift_columns-default.txt-Results] [GOOD] >> test.py::test[select-table_funcs_spec-default.txt-Results] >> test.py::test[action-eval_range--ForceBlocks] [GOOD] >> test.py::test[action-eval_range--Results] >> test.py::test[weak_field-weak_field_join_condition--Results] [GOOD] >> test.py::test[window-current/aggregations_leadlag--ForceBlocks] >> test.py::test[window-full/leadlag--Results] [GOOD] >> test.py::test[window-generic/aggregations_mixed--Results] >> test.py::test[insert-keepmeta_nonstrict_fail--ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q84-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q84-default.txt-Results] >> test.py::test[insert-keepmeta_nonstrict_fail--Results] [GOOD] >> test.py::test[insert-keepmeta_proto_fail--ForceBlocks] >> test.py::test[blocks-interval_add_date--Results] [GOOD] >> test.py::test[blocks-sort_one_asc--ForceBlocks] >> test.py::test[optimizers-combinebykey_fields_subset--Results] [GOOD] >> test.py::test[optimizers-fuse_map_mapreduce--Results] [SKIPPED] >> test.py::test[optimizers-length_over_merge--Results] >> test.py::test[join-star_join_multi--Results] [GOOD] >> test.py::test[join-star_join_with_diff_complex_key--ForceBlocks] [SKIPPED] >> test.py::test[join-star_join_with_diff_complex_key--Results] [SKIPPED] >> test.py::test[join-yql-10654_pullup_with_sys_columns-off-ForceBlocks] >> test.py::test[join-mapjoin_on_very_complex_type--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_very_complex_type--Results] >> test.py::test[sampling-map-keyfilter-Results] [GOOD] >> test.py::test[sampling-subquery_expr-default.txt-ForceBlocks] >> test.py::test[binding-table_regexp_binding--Results] [GOOD] >> test.py::test[blocks-add_uint64_opt--ForceBlocks] >> test.py::test[in-in_enum_single1-default.txt-Results] [GOOD] >> test.py::test[in-in_exists_immediate_nested_subq--Results] [SKIPPED] >> test.py::test[insert-drop_sortness-calc-Results] >> test.py::test[join-premap_common_right_tablecontent--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_right_tablecontent--Results] >> test.py::test[window-distinct_over_window_full_frames--ForceBlocks] [GOOD] >> test.py::test[join-alias_where_group--Results] [GOOD] >> test.py::test[join-anyjoin_common_dup--Results] >> test.py::test[window-win_func_with_struct_access-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_with_struct_access-default.txt-Results] |85.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] [GOOD] >> test.py::test[count-count_by_nulls--Results] [GOOD] >> test.py::test[distinct-distinct_columns-default.txt-ForceBlocks] >> test.py::test[ypath-direct_read_from_dynamic--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate-ForceBlocks] [GOOD] >> 
test.py::test[join-selfjoin_on_sorted_with_filter-replicate-Results] [SKIPPED] >> test.py::test[join-three_equalities_paren--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_group_by_hop_restart_node [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=363991) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[select-simple_struct_field_access--ForceBlocks] [GOOD] >> test.py::test[action-eval_range--Results] [GOOD] >> test.py::test[select-simple_struct_field_access--Results] >> test.py::test[action-insert_after_eval_xlock--ForceBlocks] >> test.py::test[pg-tpcds-q84-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q20-default.txt-ForceBlocks] |85.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[window-distinct_over_window_full_frames--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_duo_opt--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_duo_opt--Results] >> test.py::test[join-opt_on_opt_side-off-ForceBlocks] [GOOD] >> test.py::test[join-opt_on_opt_side-off-Results] [SKIPPED] >> test.py::test[join-pullup_rownumber-off-ForceBlocks] >> test.py::test[insert-keepmeta_proto_fail--ForceBlocks] [GOOD] >> test.py::test[insert-keepmeta_proto_fail--Results] [GOOD] >> test.py::test[insert-keepmeta_view_fail--ForceBlocks] >> test.py::test[action-eval_folder_via_file_in_job--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_sorted_max_sorted_tables--Results] [SKIPPED] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Results] >> test.py::test[select-simple_struct_field_access--Results] [GOOD] >> test.py::test[select-sum_to_string-default.txt-ForceBlocks] >> test.py::test[select-hits_count--ForceBlocks] [GOOD] >> test.py::test[select-hits_count--Results] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-Results] [SKIPPED] >> test.py::test[join-split_to_list_as_key-off-Results] [SKIPPED] ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[ypath-direct_read_from_dynamic--Results] [GOOD] Test command err: 127.0.0.1 - - [05/May/2025 03:08:39] "GET /foo.txt HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 03:08:40] "GET /foo.txt HTTP/1.1" 200 - >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] [GOOD] >> test.py::test[join-premap_common_right_tablecontent--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--ForceBlocks] >> test.py::test[pg-tpcds-q91-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q91-default.txt-Results] >> test.py::test[pg-tpcds-q55-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q55-default.txt-Results] >> test.py::test[pg-in_mixed--ForceBlocks] [GOOD] >> test.py::test[pg-in_mixed--Results] >> 
test.py::test[join-join_right_cbo--ForceBlocks] [GOOD] >> test.py::test[window-win_func_with_struct_access-default.txt-Results] [GOOD] >> test.py::test[select-table_funcs_spec-default.txt-Results] [GOOD] >> test.py::test[select-unlabeled--Results] >> test.py::test[insert-keepmeta_view_fail--ForceBlocks] [GOOD] >> test.py::test[insert-keepmeta_view_fail--Results] [GOOD] >> test.py::test[insert-override_view_fail--ForceBlocks] >> test.py::test[join-join_right_cbo--Results] >> test.py::test[limit-sort_calc_limit--ForceBlocks] [GOOD] >> test.py::test[limit-sort_calc_limit--Results] >> test.py::test[order_by-limit--Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Results] [GOOD] >> test.py::test[key_filter-uuid--Results] >> test.py::test[weak_field-weak_field--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field--Results] >> test.py::test[order_by-sort_simple--Results] |85.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[join-split_to_list_as_key-off-Results] [SKIPPED] |85.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-Results] >> test.py::test[hor_join-runtime_dep-default.txt-Results] [GOOD] >> test.py::test[hor_join-skip_sampling--Results] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap--Results] [GOOD] >> test.py::test[ql_filter-integer_escaping--Results] >> test.py::test[insert-drop_sortness-calc-Results] [GOOD] >> test.py::test[insert-part_sortness-desc-Results] >> test.py::test[optimizers-length_over_merge--Results] [GOOD] >> test.py::test[optimizers-multi_to_empty_constraint--Results] >> test.py::test[join-mapjoin_on_very_complex_type--Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary-off-ForceBlocks] >> test.py::test[aggregate-aggregate_udf_nested--Results] [GOOD] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-Results] >> test.py::test[blocks-sort_one_asc--ForceBlocks] [GOOD] >> test.py::test[blocks-sort_one_asc--Results] >> test.py::test[pg-tpcds-q55-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q60-default.txt-ForceBlocks] >> test.py::test[blocks-add_uint64_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint64_opt--Results] >> test.py::test[pg-tpcds-q91-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q04-default.txt-ForceBlocks] >> test.py::test[optimizers-fuse_map_mapreduce_multi_input--Results] [SKIPPED] >> test.py::test[optimizers-sort_by_nonstrict_const--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-sort_by_nonstrict_const--Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_window--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_duo_opt--Results] [GOOD] >> test.py::test[aggregate-group_by_session_compact--ForceBlocks] >> test.py::test[pg-in_mixed--Results] [GOOD] >> test.py::test[pg-select_yql_type--ForceBlocks] ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[window-win_func_with_struct_access-default.txt-Results] [GOOD] Test command err: 127.0.0.1 - - [05/May/2025 03:08:33] "GET /mylib.sql HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 03:08:34] "GET /mylib.sql HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 03:08:36] "GET /mylib.sql 
HTTP/1.1" 200 - >> test.py::test[select-hits_count--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--ForceBlocks] [GOOD] >> test.py::test[join-yql-10654_pullup_with_sys_columns-off-ForceBlocks] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--Results] >> test.py::test[join-yql-10654_pullup_with_sys_columns-off-Results] [SKIPPED] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt-ForceBlocks] >> test.py::test[select-trivial_order_by-default.txt-ForceBlocks] >> test.py::test[sampling-subquery_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-subquery_expr-default.txt-Results] >> test.py::test[insert-override_view_fail--ForceBlocks] [GOOD] >> test.py::test[insert-override_view_fail--Results] [GOOD] >> test.py::test[join-anyjoin_common_dup--ForceBlocks] >> test.py::test[distinct-distinct_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_columns-default.txt-Results] >> test.py::test[join-three_equalities_paren--ForceBlocks] [GOOD] >> test.py::test[join-three_equalities_paren--Results] >> test.py::test[weak_field-weak_field--Results] [GOOD] >> test.py::test[weak_field-weak_field_wrong_types_fail--ForceBlocks] >> test.py::test[limit-sort_calc_limit--Results] [GOOD] >> test.py::test[lineage-list_literal4-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-list_literal4-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--ForceBlocks] >> test.py::test[join-join_right_cbo--Results] [GOOD] >> test.py::test[join-left_join_null_column--ForceBlocks] >> test.py::test[window-full/session--Results] [GOOD] >> test.py::test[blocks-date_add_interval_scalar--Results] [GOOD] >> test.py::test[window-current/aggregations_leadlag--ForceBlocks] [GOOD] >> test.py::test[window-current/aggregations_leadlag--Results] >> test.py::test[blocks-date_equals_scalar--Results] >> test.py::test[window-full/session_aliases--Results] >> test.py::test[blocks-add_uint64_opt--Results] [GOOD] >> test.py::test[blocks-combine_hashed_pg--ForceBlocks] >> test.py::test[aggr_factory-min-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-some-default.txt-Results] >> test.py::test[blocks-sort_one_asc--Results] [GOOD] >> test.py::test[column_group-groups-max-ForceBlocks] [SKIPPED] >> test.py::test[column_group-groups-max-Results] [SKIPPED] >> test.py::test[aggr_factory-list--ForceBlocks] [GOOD] >> test.py::test[aggr_factory-list--Results] >> test.py::test[blocks-top_sort_two_mix--ForceBlocks] >> test.py::test[action-insert_after_eval_xlock--ForceBlocks] [GOOD] >> test.py::test[action-insert_after_eval_xlock--Results] >> test.py::test[type_v3-append_diff_flags--ForceBlocks] >> test.py::test[action-eval_folder_via_file_in_job--ForceBlocks] [GOOD] >> test.py::test[action-eval_folder_via_file_in_job--Results] >> test.py::test[select-unlabeled--Results] [GOOD] >> test.py::test[select-use_cluster-default.txt-Results] >> test.py::test[blocks-date_not_equals--Results] [GOOD] >> test.py::test[blocks-decimal_multiplicative_ops--Results] >> test.py::test[join-pullup_rownumber-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_rownumber-off-Results] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted-off-ForceBlocks] >> test.py::test[select-sum_to_string-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-sum_to_string-default.txt-Results] >> test.py::test[sampling-subquery_expr-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_mapjoin-default.txt-ForceBlocks] |85.4%| [TM] 
{default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[column_group-groups-max-Results] [SKIPPED] >> test.py::test[schema-user_schema_missing_column--ForceBlocks] >> test.py::test[weak_field-weak_field_wrong_types_fail--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_wrong_types_fail--Results] [GOOD] >> test.py::test[window-generic/aggregations_include_current--ForceBlocks] >> test.py::test[distinct-distinct_columns-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_window-default.txt-ForceBlocks] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--ForceBlocks] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--Results] >> test.py::test[optimizers-multi_to_empty_constraint--Results] [GOOD] >> test.py::test[ql_filter-integer_escaping--Results] [GOOD] >> test.py::test[sampling-bind_join_left-default.txt-Results] [SKIPPED] >> test.py::test[sampling-subquery_limit-default.txt-Results] >> test.py::test[insert-part_sortness-desc-Results] [GOOD] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Results] [GOOD] >> test.py::test[tpch-q15-default.txt-Results] >> test.py::test[join-three_equalities_paren--Results] [GOOD] >> test.py::test[key_filter-is_null_or_data--ForceBlocks] >> test.py::test[action-eval_folder_via_file_in_job--Results] [GOOD] >> test.py::test[action-subquery_merge2-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_rollup_grouping_hum--Results] [GOOD] >> test.py::test[aggregate-group_by_session--Results] >> test.py::test[aggregate-percentile_interval-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_native-default-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_native-default-Results] [SKIPPED] >> test.py::test[blocks-block_input-aux_columns-ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input-aux_columns-Results] [SKIPPED] >> test.py::test[blocks-boolean_ops--ForceBlocks] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--Results] [GOOD] >> test.py::test[window-win_func_rank_by_part--ForceBlocks] >> test.py::test[select-sum_to_string-default.txt-Results] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-ForceBlocks] >> test.py::test[action-insert_after_eval_xlock--Results] [GOOD] >> test.py::test[action-runtime_if_select-default.txt-ForceBlocks] |85.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[optimizers-multi_to_empty_constraint--Results] [GOOD] >> test.py::test[dq-dq_replicate_ok-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q60-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q60-default.txt-Results] >> test.py::test[select-trivial_order_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-trivial_order_by-default.txt-Results] |85.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[insert-part_sortness-desc-Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_choose_primary-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_no_sorted--ForceBlocks] >> test.py::test[pg-tpch-q20-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q20-default.txt-Results] >> test.py::test[hor_join-skip_sampling--Results] [GOOD] >> test.py::test[pg-select_yql_type--ForceBlocks] [GOOD] >> test.py::test[pg-select_yql_type--Results] >> 
test.py::test[window-current/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-current/session_aliases--ForceBlocks] >> test.py::test[order_by-sort_simple--Results] [GOOD] >> test.py::test[params-complex_yson--Results] >> test.py::test[window-generic/aggregations_mixed--Results] [GOOD] >> test.py::test[window-lagging/aggregations_leadlag--Results] >> test.py::test[aggr_factory-list--Results] [GOOD] >> test.py::test[aggregate-avg_with_having-default.txt-ForceBlocks] >> test.py::test[select-trivial_order_by-default.txt-Results] [GOOD] >> test.py::test[type_v3-mergejoin_with_sort--ForceBlocks] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt-Results] >> test.py::test[select-use_cluster-default.txt-Results] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--Results] [GOOD] >> test.py::test[join-simple_columns_partial--ForceBlocks] >> test.py::test[type_v3-append_diff_flags--ForceBlocks] [GOOD] >> test.py::test[type_v3-append_diff_flags--Results] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Results] >> test.py::test[in-in_with_list_dict-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--Results] >> test.py::test[aggregate-group_by_session_compact--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_compact--Results] >> test.py::test[pg-tpcds-q60-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q64-default.txt-ForceBlocks] >> test.py::test[join-left_join_null_column--ForceBlocks] [GOOD] >> test.py::test[join-left_join_null_column--Results] >> test.py::test[pg-tpch-q04-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q04-default.txt-Results] |85.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[hor_join-skip_sampling--Results] [GOOD] >> test.py::test[join-anyjoin_common_dup--ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_common_dup--Results] >> test.py::test[blocks-top_sort_two_mix--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_two_mix--Results] >> test.py::test[join-selfjoin_on_sorted-off-ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted-off-Results] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_filter--ForceBlocks] >> test.py::test[pg-tpcds-q90-default.txt-ForceBlocks] |85.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[select-use_cluster-default.txt-Results] [GOOD] >> test.py::test[optimizers-unused_columns_window--ForceBlocks] [GOOD] >> test.py::test[optimizers-unused_columns_window--Results] >> test.py::test[pg-select_yql_type--Results] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-ForceBlocks] >> test.py::test[sampling-subquery_mapjoin-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-subquery_mapjoin-default.txt-Results] >> test.py::test[key_filter-uuid--Results] [GOOD] >> test.py::test[blocks-decimal_multiplicative_ops--Results] [GOOD] >> test.py::test[key_filter-yql_5895_or-default.txt-Results] >> test.py::test[blocks-interval_div--Results] >> test.py::test[type_v3-append_diff_flags--Results] [GOOD] >> 
test.py::test[union_all-union_all_subexpr-default.txt-ForceBlocks] >> test.py::test[action-subquery_merge2-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-subquery_merge2-default.txt-Results] >> test.py::test[schema-user_schema_missing_column--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_missing_column--Results] >> test.py::test[sampling-subquery_limit-default.txt-Results] [GOOD] >> test.py::test[sampling-take_with_sampling-default.txt-Results] >> test.py::test[blocks-combine_hashed_pg--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_pg--Results] >> test.py::test[key_filter-is_null_or_data--ForceBlocks] [GOOD] >> test.py::test[key_filter-is_null_or_data--Results] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt-Results] [GOOD] >> test.py::test[key_filter-split_input_with_key_filter1--ForceBlocks] [SKIPPED] >> test.py::test[key_filter-split_input_with_key_filter1--Results] [SKIPPED] >> test.py::test[limit-yql-8611_calc_peephole--ForceBlocks] >> test.py::test[params-complex_yson--Results] [GOOD] >> test.py::test[pg-insert--Results] >> test.py::test[blocks-boolean_ops--ForceBlocks] [GOOD] >> test.py::test[blocks-boolean_ops--Results] >> test.py::test[action-subquery_merge2-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-every-default.txt-ForceBlocks] >> test.py::test[schema-user_schema_missing_column--Results] [GOOD] >> test.py::test[select-autoextract_source_value-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_session_compact--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--ForceBlocks] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Results] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-Results] >> test.py::test[blocks-top_sort_two_mix--Results] [GOOD] >> test.py::test[column_group-hint_non_lst_yson_fail--ForceBlocks] >> test.py::test[column_group-hint_non_lst_yson_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_non_lst_yson_fail--Results] [SKIPPED] >> test.py::test[column_order-insert_tmp-default.txt-ForceBlocks] >> test.py::test[aggr_factory-some-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-stddev-default.txt-Results] >> test.py::test[distinct-distinct_window-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_window-default.txt-Results] >> test.py::test[dq-dq_replicate_ok-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-dq_replicate_ok-default.txt-Results] [SKIPPED] >> test.py::test[expr-as_table_emptylist--ForceBlocks] |85.4%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part19/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[sampling-subquery_mapjoin-default.txt-Results] [GOOD] >> test.py::test[schema-insert_sorted-read_schema-ForceBlocks] >> test.py::test[action-runtime_if_select-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-runtime_if_select-default.txt-Results] >> test.py::test[join-left_join_null_column--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_simple--ForceBlocks] >> test.py::test[join-mergejoin_force_no_sorted--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_no_sorted--Results] >> test.py::test[window-win_func_rank_by_part--ForceBlocks] [GOOD] >> test.py::test[window-win_func_rank_by_part--Results] >> test.py::test[optimizers-unused_columns_window--Results] [GOOD] >> test.py::test[optimizers-yql-14279_keyextract_with_world_dep--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-14279_keyextract_with_world_dep--Results] [SKIPPED] >> test.py::test[optimizers-yql-17715_concat_sort_desc--ForceBlocks] |85.4%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part8/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[optimizers-yql-17715_concat_sort_desc--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-17715_concat_sort_desc--Results] [SKIPPED] >> test.py::test[order_by-assume_with_transform_desc--ForceBlocks] >> test.py::test[key_filter-is_null_or_data--Results] [GOOD] >> test.py::test[lineage-some_tablerow-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-some_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-direct_row_after_merge--ForceBlocks] >> test.py::test[action-eval_column--ForceBlocks] >> test.py::test[tpch-q15-default.txt-Results] [GOOD] >> test.py::test[udf-python_script_from_file--Results] [SKIPPED] >> test.py::test[udf-udaf_lambda-default.txt-Results] |85.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Results] [GOOD] >> test.py::test[aggregate-avg_with_having-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-avg_with_having-default.txt-Results] >> test.py::test[aggregate-group_by_session--Results] [GOOD] >> test.py::test[aggregate-group_by_session_distinct_compact--Results] >> test.py::test[in-in_with_list_dict-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_with_list_dict-default.txt-Results] >> test.py::test[blocks-boolean_ops--Results] [GOOD] >> test.py::test[blocks-combine_all_some--ForceBlocks] >> test.py::test[pg-tpch-q04-default.txt-Results] [GOOD] >> test.py::test[pragma-file-default.txt-ForceBlocks] >> test.py::test[type_v3-mergejoin_with_sort--ForceBlocks] [GOOD] >> test.py::test[type_v3-mergejoin_with_sort--Results] >> test.py::test[action-dep_world_quote_code-default.txt-ForceBlocks] >> test.py::test[select-tablename_with_table_row-default.txt-Results] [GOOD] >> test.py::test[select-uncorrelated_subqueries--ForceBlocks] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Results] >> test.py::test[pg-tpcds-q90-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q90-default.txt-Results] >> test.py::test[blocks-top_sort_two_desc--ForceBlocks] >> test.py::test[action-runtime_if_select-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_evaluate-default.txt-ForceBlocks] >> test.py::test[key_filter-yql_5895_or-default.txt-Results] [GOOD] >> test.py::test[like-like_clause-default.txt-Results] >> 
test.py::test[join-simple_columns_partial--ForceBlocks] [GOOD] >> test.py::test[join-simple_columns_partial--Results] >> test.py::test[pg-tpcds-q02-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-Results] >> test.py::test[in-in_with_list_dict-default.txt-Results] [GOOD] >> test.py::test[insert-drop_sortness-calc-ForceBlocks] >> test.py::test[sampling-take_with_sampling-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_force_no_sorted--Results] [GOOD] >> test.py::test[schema-def_values_job--Results] >> test.py::test[window-win_func_rank_by_part--Results] [GOOD] >> test.py::test[window-yql-14738-default.txt-ForceBlocks] >> test.py::test[join-selfjoin_on_sorted_with_filter--ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter--Results] >> test.py::test[pg-insert--Results] [GOOD] >> test.py::test[pg-nulls-default.txt-Results] >> test.py::test[pg-tpcds-q64-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q64-default.txt-Results] >> test.py::test[union_all-union_all_subexpr-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_subexpr-default.txt-Results] >> test.py::test[blocks-combine_hashed_pg--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_all--ForceBlocks] >> test.py::test[join-anyjoin_common_dup--Results] [GOOD] >> test.py::test[join-convert_key-off-Results] [SKIPPED] >> test.py::test[join-force_merge_join-default.txt-Results] >> test.py::test[pg-tpch-q20-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-lambda--ForceBlocks] >> test.py::test[window-generic/aggregations_include_current--ForceBlocks] [GOOD] >> test.py::test[window-generic/aggregations_include_current--Results] >> test.py::test[pg-tpcds-q90-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q98-default.txt-ForceBlocks] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--Results] [SKIPPED] >> test.py::test[aggregate-avg_with_having-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling-sorted-ForceBlocks] >> test.py::test[aggregate-group_by_expr_dict--ForceBlocks] >> test.py::test[pg-tpcds-q02-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q06-default.txt-ForceBlocks] >> test.py::test[blocks-interval_div--Results] [GOOD] >> test.py::test[blocks-interval_sub_interval--Results] >> test.py::test[distinct-distinct_window-default.txt-Results] [GOOD] >> test.py::test[expr-evaluate_parse_inf_nan--ForceBlocks] [SKIPPED] >> test.py::test[expr-evaluate_parse_inf_nan--Results] [SKIPPED] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-ForceBlocks] >> test.py::test[type_v3-mergejoin_with_sort--Results] [GOOD] >> test.py::test[type_v3-split--ForceBlocks] [SKIPPED] >> test.py::test[type_v3-split--Results] [SKIPPED] >> test.py::test[type_v3-uuid--ForceBlocks] >> test.py::test[expr-as_table_emptylist--ForceBlocks] [GOOD] >> test.py::test[expr-as_table_emptylist--Results] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--Results] [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio--Results] >> test.py::test[tpch-q17-default.txt-ForceBlocks] |85.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[join-mergejoin_force_no_sorted--Results] [GOOD] >> test.py::test[window-current/session_aliases--ForceBlocks] [GOOD] >> 
test.py::test[window-current/session_aliases--Results] >> test.py::test[join-anyjoin_common_dup--Results] [GOOD] >> test.py::test[join-anyjoin_common_dup-off-ForceBlocks] >> test.py::test[select-autoextract_source_value-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-autoextract_source_value-default.txt-Results] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--Results] >> test.py::test[join-selfjoin_on_sorted_with_filter--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_simple--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_simple--Results] >> test.py::test[hor_join-yql19332_aux_cols--ForceBlocks] >> test.py::test[union_all-union_all_subexpr-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_type-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q64-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q99-default.txt-ForceBlocks] >> test.py::test[optimizers-direct_row_after_merge--ForceBlocks] [GOOD] >> test.py::test[optimizers-direct_row_after_merge--Results] >> test.py::test[pragma-file-default.txt-ForceBlocks] [GOOD] >> test.py::test[pragma-file-default.txt-Results] >> test.py::test[schema-insert_sorted-read_schema-ForceBlocks] [GOOD] >> test.py::test[schema-insert_sorted-read_schema-Results] >> test.py::test[window-lagging/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-mixed/aggregations--Results] >> test.py::test[udf-udaf_lambda-default.txt-Results] [GOOD] >> test.py::test[udf-udaf_short--Results] >> test.py::test[blocks-date_equals_scalar--Results] [GOOD] >> test.py::test[blocks-date_less--Results] >> test.py::test[aggr_factory-every-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-every-default.txt-Results] >> test.py::test[expr-as_table_emptylist--Results] [GOOD] >> test.py::test[hor_join-table_record--ForceBlocks] >> test.py::test[order_by-assume_with_transform_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-assume_with_transform_desc--Results] |85.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[join-selfjoin_on_sorted_with_filter--Results] [GOOD] >> test.py::test[action-subquery_merge_evaluate-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-subquery_merge_evaluate-default.txt-Results] >> test.py::test[pragma-file-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_field_subset--ForceBlocks] >> test.py::test[select-autoextract_source_value-default.txt-Results] [GOOD] >> test.py::test[select-corr_name_in_select-default.txt-ForceBlocks] >> test.py::test[select-uncorrelated_subqueries--ForceBlocks] [GOOD] >> test.py::test[select-uncorrelated_subqueries--Results] >> test.py::test[blocks-combine_all_some--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_some--Results] >> test.py::test[schema-insert_sorted-read_schema-Results] [GOOD] >> test.py::test[schema-other--ForceBlocks] [SKIPPED] >> test.py::test[schema-other--Results] [SKIPPED] >> test.py::test[schema-other_job--ForceBlocks] [SKIPPED] >> test.py::test[schema-other_job--Results] [SKIPPED] >> test.py::test[schema-patchtype--ForceBlocks] >> test.py::test[schema-def_values_job--Results] [GOOD] >> test.py::test[like-like_clause-default.txt-Results] [GOOD] >> test.py::test[like-like_clause_no_pattern-default.txt-Results] >> test.py::test[column_order-insert_tmp-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[column_order-insert_tmp-default.txt-Results] >> test.py::test[optimizers-direct_row_after_merge--Results] [GOOD] >> test.py::test[optimizers-length_over_merge_fs_multiusage--ForceBlocks] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--Results] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-ForceBlocks] >> test.py::test[insert-drop_sortness-calc-ForceBlocks] [GOOD] >> test.py::test[insert-drop_sortness-calc-Results] >> test.py::test[blocks-top_sort_two_desc--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_two_desc--Results] >> test.py::test[window-current/session_aliases--Results] [GOOD] >> test.py::test[window-generic/aggregations_after_current--ForceBlocks] >> test.py::test[join-left_join_right_pushdown_simple--Results] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst--ForceBlocks] >> test.py::test[join-simple_columns_partial--Results] [GOOD] >> test.py::test[join-yql-10654_pullup_with_sys_columns--ForceBlocks] >> test.py::test[action-subquery_merge_evaluate-default.txt-Results] [GOOD] >> test.py::test[action-table_content_before_from_folder--ForceBlocks] >> test.py::test[window-full/session_aliases--Results] [GOOD] >> test.py::test[window-win_expr_bounds--Results] >> test.py::test[pg-tpcds-q98-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q98-default.txt-Results] >> test.py::test[order_by-assume_with_transform_desc--Results] [GOOD] >> test.py::test[order_by-literal--ForceBlocks] >> test.py::test[pg-tpcds-q06-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q06-default.txt-Results] >> test.py::test[window-generic/aggregations_include_current--Results] [GOOD] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-ForceBlocks] >> test.py::test[pg_catalog-lambda--ForceBlocks] [GOOD] >> test.py::test[pg_catalog-lambda--Results] >> test.py::test[union_all-union_all_trivial-default.txt-ForceBlocks] |85.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[schema-def_values_job--Results] [GOOD] >> test.py::test[aggregate-group_by_session_distinct_compact--Results] [GOOD] >> test.py::test[aggregate-library_error_in_aggregation_fail--Results] >> test.py::test[action-dep_world_quote_code-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-dep_world_quote_code-default.txt-Results] >> test.py::test[aggregate-group_by_expr_dict--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_dict--Results] >> test.py::test[insert-drop_sortness-calc-Results] [GOOD] >> test.py::test[insert-replace_inferred_op--ForceBlocks] >> test.py::test[type_v3-uuid--ForceBlocks] [GOOD] >> test.py::test[type_v3-uuid--Results] >> test.py::test[limit-yql-8611_calc_peephole--ForceBlocks] [GOOD] >> test.py::test[limit-yql-8611_calc_peephole--Results] >> test.py::test[select-uncorrelated_subqueries--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_all-default.txt-ForceBlocks] >> test.py::test[aggr_factory-every-default.txt-Results] [GOOD] >> test.py::test[aggregate-compare_by_nulls-default.txt-ForceBlocks] >> test.py::test[column_order-insert_tmp-default.txt-Results] [GOOD] >> test.py::test[column_order-select_win_func-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q98-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q12-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q06-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q11-default.txt-ForceBlocks] >> test.py::test[aggr_factory-stddev-default.txt-Results] [GOOD] >> 
test.py::test[aggregate-aggregate_list_in_key-default.txt-Results] >> test.py::test[blocks-interval_sub_interval--Results] [GOOD] >> test.py::test[blocks-pg_to_dates--Results] >> test.py::test[blocks-combine_all_some--Results] [GOOD] >> test.py::test[blocks-combine_hashed_avg--ForceBlocks] >> test.py::test[pg-tpcds-q99-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q99-default.txt-Results] >> test.py::test[blocks-top_sort_two_desc--Results] [GOOD] >> test.py::test[column_group-groups-lookup-ForceBlocks] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-Results] >> test.py::test[weak_field-weak_field_type-default.txt-ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_type-default.txt-Results] >> test.py::test[column_group-groups-lookup-ForceBlocks] [SKIPPED] >> test.py::test[column_group-groups-lookup-Results] [SKIPPED] >> test.py::test[column_group-insert_diff_groups1_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-insert_diff_groups1_fail--Results] [SKIPPED] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-ForceBlocks] >> test.py::test[hor_join-yql19332_aux_cols--ForceBlocks] [GOOD] >> test.py::test[pg-nulls-default.txt-Results] [GOOD] >> test.py::test[pg-select_from_columns-default.txt-Results] >> test.py::test[hor_join-yql19332_aux_cols--Results] >> test.py::test[join-force_merge_join-default.txt-Results] [GOOD] >> test.py::test[join-full_equal_not_null--Results] >> test.py::test[pg_catalog-lambda--Results] [GOOD] >> test.py::test[produce-process_row_and_columns-default.txt-ForceBlocks] >> test.py::test[produce-reduce_multi_in_sampling-sorted-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] >> test.py::test[tpch-q17-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q17-default.txt-Results] >> test.py::test[udf-udaf_short--Results] [GOOD] >> test.py::test[udf-udf_call_with_group_and_limit--Results] >> test.py::test[hor_join-table_record--ForceBlocks] [GOOD] >> test.py::test[hor_join-table_record--Results] >> test.py::test[type_v3-uuid--Results] [GOOD] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-ForceBlocks] >> test.py::test[aggregate-library_error_in_aggregation_fail--Results] [GOOD] >> test.py::test[bigdate-table_io-default.txt-Results] >> test.py::test[pg-tpcds-q99-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q13-default.txt-ForceBlocks] >> test.py::test[window-yql-14738-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-yql-14738-default.txt-Results] |85.5%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part19/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[blocks-distinct_opt_state_all--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_opt_state_all--Results] >> test.py::test[action-dep_world_quote_code-default.txt-Results] [GOOD] >> test.py::test[action-empty_do-default.txt-ForceBlocks] >> test.py::test[select-corr_name_in_select-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-corr_name_in_select-default.txt-Results] >> test.py::test[coalesce-coalesce_few_opt--ForceBlocks] >> test.py::test[weak_field-weak_field_type-default.txt-Results] [GOOD] >> test.py::test[window-full/leadlag_compact--ForceBlocks] >> test.py::test[join-anyjoin_common_dup-off-ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_common_dup-off-Results] [SKIPPED] >> test.py::test[join-inner_grouped-off-ForceBlocks] >> test.py::test[like-like_clause_no_pattern-default.txt-Results] [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt-Results] >> test.py::test[aggregate-group_by_expr_dict--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_with_where-default.txt-ForceBlocks] >> test.py::test[action-eval_column--ForceBlocks] [GOOD] >> test.py::test[action-eval_column--Results] >> test.py::test[produce-reduce_all_field_subset--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_all_field_subset--Results] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-Results] [GOOD] >> test.py::test[order_by-literal--ForceBlocks] [GOOD] >> test.py::test[order_by-literal--Results] >> test.py::test[optimizers-length_over_merge_fs_multiusage--ForceBlocks] [GOOD] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] >> test.py::test[hor_join-yql19332_aux_cols--Results] [GOOD] >> test.py::test[insert-append_proto_fail--ForceBlocks] |85.5%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part8/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[join-yql-10654_pullup_with_sys_columns--ForceBlocks] [GOOD] >> test.py::test[join-yql-10654_pullup_with_sys_columns--Results] >> test.py::test[hor_join-table_record--Results] [GOOD] >> test.py::test[in-in_enum_single0-default.txt-ForceBlocks] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] [GOOD] >> test.py::test[result_types-pg-default.txt-ForceBlocks] >> test.py::test[select-corr_name_in_select-default.txt-Results] [GOOD] >> test.py::test[select-dict_lookup-default.txt-ForceBlocks] >> test.py::test[tpch-q17-default.txt-Results] [GOOD] >> test.py::test[tpch-q22-default.txt-ForceBlocks] >> test.py::test[union_all-union_all_trivial-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_trivial-default.txt-Results] >> test.py::test[pg-select_from_columns-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q21-default.txt-Results] |85.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-Results] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst--ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q11-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q11-default.txt-Results] >> test.py::test[insert-replace_inferred_op--ForceBlocks] [GOOD] >> test.py::test[insert-replace_inferred_op--Results] >> test.py::test[order_by-literal--Results] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey--ForceBlocks] >> test.py::test[blocks-pg_to_dates--Results] [GOOD] >> test.py::test[blocks-sort_two_desc--Results] >> test.py::test[join-lookupjoin_bug7646_subst--Results] >> test.py::test[action-table_content_before_from_folder--ForceBlocks] [GOOD] >> test.py::test[action-table_content_before_from_folder--Results] >> test.py::test[schema-patchtype--ForceBlocks] [GOOD] >> test.py::test[schema-patchtype--Results] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] [GOOD] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-ForceBlocks] >> test.py::test[window-win_expr_bounds--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort--Results] >> test.py::test[pg-tpch-q11-default.txt-ForceBlocks] >> test.py::test[binding-table_range_strict_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-Results] >> test.py::test[produce-reduce_all_field_subset--Results] [GOOD] >> test.py::test[produce-reduce_all_multi_in-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_all_multi_in-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_ref--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_all-default.txt-Results] >> test.py::test[blocks-combine_hashed_avg--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_avg--Results] >> test.py::test[limit-yql-8611_calc_peephole--Results] [GOOD] >> test.py::test[lineage-select_union_all-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_union_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-unused_columns-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-unused_columns-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-map_force--ForceBlocks] [SKIPPED] >> test.py::test[multicluster-map_force--Results] [SKIPPED] >> test.py::test[multicluster-pull-default.txt-ForceBlocks] [SKIPPED] >> 
test.py::test[multicluster-pull-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-fuse_map_mapreduce_multi_input--ForceBlocks] [SKIPPED] >> test.py::test[union_all-union_all_trivial-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_with_limits-default.txt-ForceBlocks] >> test.py::test[insert-append_proto_fail--ForceBlocks] [GOOD] >> test.py::test[insert-append_proto_fail--Results] [GOOD] >> test.py::test[insert-keepmeta-with_view-ForceBlocks] [SKIPPED] >> test.py::test[insert-keepmeta-with_view-Results] [SKIPPED] >> test.py::test[insert-part_sortness-desc-ForceBlocks] >> test.py::test[pg-tpch-q12-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q12-default.txt-Results] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-Results] >> test.py::test[produce-process_row_and_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_row_and_columns-default.txt-Results] >> test.py::test[action-empty_do-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-empty_do-default.txt-Results] >> test.py::test[pg-tpcds-q11-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q15-default.txt-ForceBlocks] >> test.py::test[join-yql-10654_pullup_with_sys_columns--Results] [GOOD] >> test.py::test[join-yql-14847-off-ForceBlocks] >> test.py::test[insert-replace_inferred_op--Results] [GOOD] >> test.py::test[join-count_bans--ForceBlocks] >> test.py::test[column_order-select_win_func-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_win_func-default.txt-Results] >> test.py::test[aggregate-group_by_cube_expr_trio--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-Results] >> test.py::test[blocks-distinct_opt_state_all--Results] [GOOD] >> test.py::test[blocks-sort_one_desc--ForceBlocks] >> test.py::test[schema-patchtype--Results] [GOOD] >> test.py::test[schema-select_all-yamred_dsv_raw-ForceBlocks] >> test.py::test[join-full_equal_not_null--Results] [GOOD] >> test.py::test[join-full_trivial--Results] >> test.py::test[limit-empty_sort_after_limit-default.txt-Results] [GOOD] >> test.py::test[limit-insert_with_limit--Results] |85.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[optimizers-fuse_map_mapreduce_multi_input--ForceBlocks] [SKIPPED] >> test.py::test[join-no_empty_join_for_dyn--ForceBlocks] [SKIPPED] >> test.py::test[join-no_empty_join_for_dyn--Results] [SKIPPED] >> test.py::test[join-star_join_mirror-off-ForceBlocks] >> test.py::test[aggregate-compare_by_nulls-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-compare_by_nulls-default.txt-Results] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Results] >> test.py::test[action-table_content_before_from_folder--Results] [GOOD] >> test.py::test[aggregate-compact_distinct--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-compact_distinct--Results] [SKIPPED] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-ForceBlocks] >> test.py::test[action-empty_do-default.txt-Results] [GOOD] >> test.py::test[action-eval_typeof_output_table--ForceBlocks] >> test.py::test[join-lookupjoin_bug7646_subst--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o-off-ForceBlocks] >> 
test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_sum_filter_opt--ForceBlocks] >> test.py::test[coalesce-coalesce_few_opt--ForceBlocks] [GOOD] >> test.py::test[coalesce-coalesce_few_opt--Results] >> test.py::test[pg-tpcds-q21-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q23-default.txt-Results] >> test.py::test[produce-process_row_and_columns-default.txt-Results] [GOOD] >> test.py::test[produce-process_rows_sorted_multi_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_rows_sorted_multi_out--Results] [SKIPPED] >> test.py::test[join-inner_grouped-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_grouped-off-Results] [SKIPPED] >> test.py::test[select-dict_lookup-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dict_lookup-default.txt-Results] >> test.py::test[aggregate-group_by_expr_with_where-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_with_where-default.txt-Results] >> test.py::test[udf-udf_call_with_group_and_limit--Results] [GOOD] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-Results] >> test.py::test[pg-tpch-q13-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q13-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_all-default.txt-Results] [GOOD] >> test.py::test[tpch-q10-default.txt-ForceBlocks] >> test.py::test[window-generic/aggregations_after_current--ForceBlocks] [GOOD] >> test.py::test[window-generic/aggregations_after_current--Results] >> test.py::test[action-eval_column--Results] [GOOD] >> test.py::test[action-evaluate_queries--ForceBlocks] >> test.py::test[in-in_enum_single0-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_enum_single0-default.txt-Results] >> test.py::test[pg-tpch-q12-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q16-default.txt-ForceBlocks] >> test.py::test[window-yql-14738-default.txt-Results] [GOOD] >> test.py::test[ypath-limit_with_key-default.txt-ForceBlocks] >> test.py::test[blocks-combine_hashed_avg--Results] [GOOD] >> test.py::test[blocks-distinct_mixed_all--ForceBlocks] >> test.py::test[result_types-pg-default.txt-ForceBlocks] [GOOD] >> test.py::test[result_types-pg-default.txt-Results] |85.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_io-default.txt-Results] [GOOD] |85.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[join-inner_grouped-off-Results] [SKIPPED] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-Results] [GOOD] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-ForceBlocks] >> test.py::test[window-full/leadlag_compact--ForceBlocks] [GOOD] >> test.py::test[window-full/leadlag_compact--Results] >> test.py::test[blocks-sort_two_desc--Results] [GOOD] >> test.py::test[case-case_val_when_then-default.txt-Results] |85.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[produce-process_rows_sorted_multi_out--Results] [SKIPPED] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Results] [GOOD] >> test.py::test[column_order-join_nosimple--ForceBlocks] [SKIPPED] >> 
test.py::test[column_order-join_nosimple--Results] [SKIPPED] >> test.py::test[distinct-distinct_count_only-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_num_key_and_subkey--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey--Results] >> test.py::test[action-eval_typeof_output_table--ForceBlocks] [GOOD] >> test.py::test[action-eval_typeof_output_table--Results] [GOOD] >> test.py::test[agg_apply-avg_const_interval--ForceBlocks] >> test.py::test[window-mixed/aggregations--Results] [GOOD] >> test.py::test[window-rank/plain--Results] >> test.py::test[coalesce-coalesce_few_opt--Results] [GOOD] >> test.py::test[column_group-hint_dup_def_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_dup_def_fail--Results] [SKIPPED] >> test.py::test[column_group-hint_non_map_yson_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_non_map_yson_fail--Results] [SKIPPED] >> test.py::test[select-dict_lookup-default.txt-Results] [GOOD] >> test.py::test[select-optional_as_warn-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[column_order-select_orderby-default.txt-ForceBlocks] >> test.py::test[aggregate-compare_by_nulls-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr--ForceBlocks] >> test.py::test[select-optional_as_warn-default.txt-Results] [SKIPPED] >> test.py::test[select-result_rows_limit--ForceBlocks] [SKIPPED] >> test.py::test[select-result_rows_limit--Results] [SKIPPED] >> test.py::test[select-tablepathprefix-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_expr_with_where-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q15-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q15-default.txt-Results] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-Results] |85.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[bigdate-table_io-default.txt-Results] [GOOD] >> test.py::test[in-in_enum_single0-default.txt-Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted-ForceBlocks] >> test.py::test[insert-part_sortness-desc-ForceBlocks] [GOOD] >> test.py::test[insert-part_sortness-desc-Results] >> test.py::test[union_all-union_all_with_limits-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_with_limits-default.txt-Results] >> test.py::test[result_types-pg-default.txt-Results] [GOOD] >> test.py::test[sampling-direct_read--ForceBlocks] >> test.py::test[column_order-select_win_func-default.txt-Results] [GOOD] >> test.py::test[dq-precompute_asyncfile--ForceBlocks] >> test.py::test[schema-select_all-yamred_dsv_raw-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-yamred_dsv_raw-Results] >> test.py::test[tpch-q22-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q22-default.txt-Results] >> test.py::test[limit-insert_with_limit--Results] [GOOD] >> test.py::test[limit-limit--Results] >> test.py::test[order_by-order_by_num_key_and_subkey--Results] [GOOD] >> test.py::test[order_by-order_by_tuple_expr-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q15-default.txt-Results] [GOOD] >> test.py::test[blocks-sort_one_desc--ForceBlocks] [GOOD] >> test.py::test[blocks-sort_one_desc--Results] >> test.py::test[blocks-date_less--Results] [GOOD] >> test.py::test[blocks-interval_sub_interval_scalar--Results] >> 
test.py::test[insert-part_sortness-desc-Results] [GOOD] >> test.py::test[insert-select_operate_with_columns--ForceBlocks] >> test.py::test[join-yql-14847-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-14847-off-Results] [SKIPPED] >> test.py::test[join-yql-8131-off-ForceBlocks] [SKIPPED] >> test.py::test[join-yql-8131-off-Results] [SKIPPED] >> test.py::test[join-yql-8980--ForceBlocks] >> test.py::test[window-win_func_aggr_4func_sort--Results] [GOOD] >> test.py::test[window-win_func_over_group_by--Results] >> test.py::test[join-lookupjoin_semi--ForceBlocks] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_ref--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_mul_col--Results] >> test.py::test[produce-reduce_multi_in_ref--Results] >> test.py::test[union_all-union_all_with_limits-default.txt-Results] [GOOD] >> test.py::test[view-standalone_view_lambda--ForceBlocks] >> test.py::test[schema-select_all-yamred_dsv_raw-Results] [GOOD] >> test.py::test[blocks-combine_all_sum_filter_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_sum_filter_opt--Results] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-7324_duplicate_arg--ForceBlocks] >> test.py::test[window-generic/aggregations_after_current--Results] [GOOD] >> test.py::test[join-count_bans--ForceBlocks] [GOOD] >> test.py::test[join-count_bans--Results] >> test.py::test[join-lookupjoin_inner_1o2o-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite--ForceBlocks] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-Results] >> test.py::test[window-row_number_to_map-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q23-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q24-default.txt-Results] >> test.py::test[pg-tpch-q11-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q11-default.txt-Results] |85.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[pg-tpcds-q15-default.txt-Results] [GOOD] >> test.py::test[join-full_trivial--Results] [GOOD] >> test.py::test[join-inner_trivial-off-Results] [SKIPPED] >> test.py::test[join-inner_trivial_from_concat-off-Results] [SKIPPED] >> test.py::test[join-join_comp_common_table-off-Results] [SKIPPED] >> test.py::test[join-join_comp_map_table--Results] >> test.py::test[action-evaluate_queries--ForceBlocks] [GOOD] >> test.py::test[action-evaluate_queries--Results] >> test.py::test[window-full/leadlag_compact--Results] [GOOD] >> test.py::test[window-generic/aggregations_before_current--ForceBlocks] |85.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[schema-select_all-yamred_dsv_raw-Results] [GOOD] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-Results] [GOOD] >> test.py::test[view-init_view_after_eval-default.txt-Results] >> test.py::test[pg-tpch-q13-default.txt-Results] [GOOD] >> test.py::test[produce-process_streaming_count-default.txt-ForceBlocks] >> test.py::test[join-star_join_mirror-off-ForceBlocks] [GOOD] >> test.py::test[blocks-sort_one_desc--Results] [GOOD] >> test.py::test[blocks-top_sort_one_desc--ForceBlocks] >> test.py::test[ypath-limit_with_key-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[ypath-limit_with_key-default.txt-Results] >> test.py::test[join-star_join_mirror-off-Results] [SKIPPED] >> test.py::test[join-yql-12022-off-ForceBlocks] >> test.py::test[case-case_val_when_then-default.txt-Results] [GOOD] >> test.py::test[column_group-hint-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_anon-disable-Results] [SKIPPED] >> test.py::test[blocks-distinct_mixed_all--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_mixed_all--Results] >> test.py::test[action-evaluate_queries--Results] [GOOD] >> test.py::test[action-unwrap_runtime_fail_with_column_message--ForceBlocks] >> test.py::test[distinct-distinct_count_only-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_count_only-default.txt-Results] >> test.py::test[column_order-select_orderby-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_orderby-default.txt-Results] >> test.py::test[blocks-combine_all_sum_filter_opt--Results] [GOOD] >> test.py::test[blocks-distinct_pure_all--ForceBlocks] >> test.py::test[agg_apply-avg_const_interval--ForceBlocks] [GOOD] >> test.py::test[agg_apply-avg_const_interval--Results] >> test.py::test[hor_join-max_outtables--ForceBlocks] >> test.py::test[aggregate-group_by_expr--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr--Results] >> test.py::test[hor_join-max_outtables--ForceBlocks] [SKIPPED] >> test.py::test[tpch-q10-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-max_outtables--Results] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_reuse--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_reuse--Results] [SKIPPED] >> test.py::test[in-in_enum_single1-default.txt-ForceBlocks] >> test.py::test[tpch-q10-default.txt-Results] >> test.py::test[select-tablepathprefix-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-tablepathprefix-default.txt-Results] >> test.py::test[limit-limit--Results] [GOOD] >> test.py::test[lineage-select_field-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_many-default.txt-Results] [SKIPPED] >> test.py::test[lineage-with_inline-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-basic-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-partition_by_key_force--Results] [SKIPPED] |85.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[column_group-hint_anon-disable-Results] [SKIPPED] >> test.py::test[tpch-q22-default.txt-Results] [GOOD] >> test.py::test[tpch-q3-default.txt-ForceBlocks] >> test.py::test[ypath-limit_with_key-default.txt-Results] [GOOD] >> test.py::test[ypath-limit_with_range-default.txt-ForceBlocks] >> test.py::test[produce-reduce_multi_in_ref--Results] [GOOD] >> test.py::test[produce-reduce_with_assume_in_subquery--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_assume_in_subquery--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_having--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_having--Results] [SKIPPED] >> test.py::test[sampling-direct_read--ForceBlocks] [GOOD] >> test.py::test[sampling-direct_read--Results] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-Results] |85.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[multicluster-partition_by_key_force--Results] [SKIPPED] >> test.py::test[column_order-select_orderby-default.txt-Results] 
[GOOD] >> test.py::test[count-count_all_view_concat--ForceBlocks] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_expr_key--ForceBlocks] >> test.py::test[order_by-order_by_tuple_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tuple_expr-default.txt-Results] >> test.py::test[dq-precompute_asyncfile--ForceBlocks] [GOOD] >> test.py::test[dq-precompute_asyncfile--Results] >> test.py::test[blocks-interval_sub_interval_scalar--Results] [GOOD] >> test.py::test[blocks-mod_uint64--Results] >> test.py::test[select-create_structures-default.txt-ForceBlocks] >> test.py::test[action-unwrap_runtime_fail_with_column_message--ForceBlocks] [GOOD] >> test.py::test[action-unwrap_runtime_fail_with_column_message--Results] [GOOD] >> test.py::test[agg_apply-table--ForceBlocks] >> test.py::test[dq-precompute_asyncfile--Results] [SKIPPED] >> test.py::test[dq-wrong_script_timeout-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[dq-wrong_script_timeout-default.txt-Results] [SKIPPED] >> test.py::test[expr-non_persistable_order_by_fail--ForceBlocks] >> test.py::test[order_by-order_with_null-default.txt-ForceBlocks] >> test.py::test[distinct-distinct_count_only-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_join-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q16-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_mixed_all--Results] [GOOD] >> test.py::test[blocks-exists--ForceBlocks] >> test.py::test[join-full_trivial_udf_call--ForceBlocks] >> test.py::test[pg-tpch-q16-default.txt-Results] >> test.py::test[select-tablepathprefix-default.txt-Results] [GOOD] >> test.py::test[select-unlabeled_1000--ForceBlocks] >> test.py::test[pg-tpcds-q24-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q40-default.txt-Results] >> test.py::test[insert-select_operate_with_columns--ForceBlocks] [GOOD] >> test.py::test[insert-select_operate_with_columns--Results] >> test.py::test[join-lookupjoin_semi--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi--Results] >> test.py::test[aggregate-group_by_expr--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_mul_col--ForceBlocks] >> test.py::test[agg_apply-avg_const_interval--Results] [GOOD] >> test.py::test[aggr_factory-mode-default.txt-ForceBlocks] >> test.py::test[view-standalone_view_lambda--ForceBlocks] [GOOD] >> test.py::test[view-standalone_view_lambda--Results] >> test.py::test[join-yql-8980--ForceBlocks] [GOOD] >> test.py::test[join-yql-8980--Results] |85.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[produce-reduce_with_python_having--Results] [SKIPPED] >> test.py::test[optimizers-yql-7324_duplicate_arg--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-7324_duplicate_arg--Results] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-Results] >> test.py::test[join-count_bans--Results] [GOOD] >> test.py::test[join-grace_join2--ForceBlocks] >> test.py::test[join-mapjoin_early_rewrite--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite--Results] >> test.py::test[sampling-direct_read--Results] [GOOD] >> test.py::test[sampling-direct_read-dynamic-ForceBlocks] >> test.py::test[sampling-mapjoin_left_sample-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_tuple_expr-default.txt-Results] [GOOD] >> test.py::test[pg-aggregate_minus_zero--ForceBlocks] >> 
test.py::test[view-init_view_after_eval-default.txt-Results] [GOOD] >> test.py::test[view-standalone_view_lambda--Results] >> test.py::test[aggregate-group_by_expr_mul_col--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-Results] >> test.py::test[tpch-q10-default.txt-Results] [GOOD] >> test.py::test[insert-select_operate_with_columns--Results] [GOOD] >> test.py::test[join-bush_in_in--ForceBlocks] >> test.py::test[pg-tpch-q11-default.txt-Results] [GOOD] >> test.py::test[produce-fuse_reduces_with_presort--ForceBlocks] >> test.py::test[expr-non_persistable_order_by_fail--ForceBlocks] [GOOD] >> test.py::test[expr-non_persistable_order_by_fail--Results] [GOOD] >> test.py::test[hor_join-out_max_outtables-default.txt-ForceBlocks] >> test.py::test[join-yql-12022-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-12022-off-Results] [SKIPPED] >> test.py::test[json-json_value/example--ForceBlocks] >> test.py::test[view-standalone_view_lambda--Results] [GOOD] >> test.py::test[view-trivial_view--ForceBlocks] >> test.py::test[blocks-top_sort_one_desc--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_one_desc--Results] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_gs--ForceBlocks] >> test.py::test[window-row_number_to_map-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o-off-ForceBlocks] >> test.py::test[window-row_number_to_map-default.txt-Results] >> test.py::test[optimizers-yql-7324_duplicate_arg--Results] [GOOD] |85.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[tpch-q10-default.txt-Results] [GOOD] >> test.py::test[blocks-distinct_pure_all--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_pure_all--Results] >> test.py::test[join-mapjoin_early_rewrite--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--ForceBlocks] >> test.py::test[pg-tpcds-q40-default.txt-Results] [GOOD] >> test.py::test[in-in_enum_single1-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_enum_single1-default.txt-Results] >> test.py::test[limit-limit-dynamic-ForceBlocks] >> test.py::test[ypath-limit_with_range-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-Results] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--ForceBlocks] >> test.py::test[ypath-limit_with_range-default.txt-Results] |85.6%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part5/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[blocks-mod_uint64--Results] [GOOD] >> test.py::test[blocks-mod_uint64_opt2--Results] >> test.py::test[blocks-combine_all_minmax_nested--ForceBlocks] |85.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[optimizers-yql-7324_duplicate_arg--Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted-ForceBlocks] [GOOD] >> test.py::test[insert-append_sorted-to_sorted-Results] >> test.py::test[count-count_all_view_concat--ForceBlocks] [GOOD] >> test.py::test[count-count_all_view_concat--Results] >> test.py::test[produce-process_streaming_count-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_streaming_count-default.txt-Results] >> test.py::test[blocks-top_sort_one_desc--Results] [GOOD] >> test.py::test[column_group-groups-single-ForceBlocks] [SKIPPED] >> test.py::test[column_group-groups-single-Results] [SKIPPED] >> test.py::test[column_group-length-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-length-perusage-Results] [SKIPPED] >> test.py::test[column_order-select_groupby_with_star-default.txt-ForceBlocks] >> test.py::test[join-yql-8980--Results] [GOOD] >> test.py::test[json-jsondocument/insert--ForceBlocks] >> test.py::test[window-rank/plain--Results] [GOOD] >> test.py::test[window-row_number_no_part_multi_input-default.txt-Results] >> test.py::test[tpch-q3-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_apply-table--ForceBlocks] [GOOD] >> test.py::test[agg_apply-table--Results] >> test.py::test[tpch-q3-default.txt-Results] >> test.py::test[aggregate-group_by_hop_expr_key--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_expr_key--Results] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_duo--ForceBlocks] >> test.py::test[blocks-exists--ForceBlocks] [GOOD] >> test.py::test[blocks-exists--Results] >> test.py::test[select-create_structures-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-create_structures-default.txt-Results] >> test.py::test[distinct-distinct_join-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_join-default.txt-Results] |85.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[pg-tpcds-q40-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q16-default.txt-Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull_fail--ForceBlocks] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull_fail--Results] [SKIPPED] >> test.py::test[produce-native_desc_reduce_with_presort--ForceBlocks] [SKIPPED] >> test.py::test[produce-native_desc_reduce_with_presort--Results] [SKIPPED] >> test.py::test[produce-process_pure_with_sort-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_expr_mul_col--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_mul_col--Results] >> test.py::test[blocks-distinct_pure_all--Results] [GOOD] >> test.py::test[blocks-div_uint64--ForceBlocks] >> test.py::test[order_by-order_with_null-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_with_null-default.txt-Results] >> test.py::test[join-grace_join2--ForceBlocks] [GOOD] >> test.py::test[join-grace_join2--Results] [SKIPPED] >> test.py::test[join-inner_on_key_only-off-ForceBlocks] >> test.py::test[join-full_trivial_udf_call--ForceBlocks] [GOOD] >> test.py::test[join-full_trivial_udf_call--Results] >> test.py::test[select-unlabeled_1000--ForceBlocks] [GOOD] >> test.py::test[select-unlabeled_1000--Results] >> 
test.py::test[window-win_func_over_group_by--Results] [GOOD] >> test.py::test[window-win_multiaggr-default.txt-Results] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-2.test] >> test.py::test[ypath-limit_with_range-default.txt-Results] [GOOD] >> test.py::test[in-in_enum_single1-default.txt-Results] [GOOD] >> test.py::test[in-in_with_table_of_tuples-default.txt-ForceBlocks] >> test.py::test[count-count_all_view_concat--Results] [GOOD] >> test.py::test[distinct-distinct_count_and_avg-default.txt-ForceBlocks] >> test.py::test[sampling-mapjoin_left_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-mapjoin_left_sample-default.txt-Results] >> test.py::test[pg-aggregate_minus_zero--ForceBlocks] [GOOD] >> test.py::test[pg-aggregate_minus_zero--Results] >> test.py::test[view-standalone_view_lambda--Results] [GOOD] >> test.py::test[weak_field-weak_field_data--Results] >> test.py::test[sampling-direct_read-dynamic-ForceBlocks] [GOOD] >> test.py::test[sampling-direct_read-dynamic-Results] >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt-ForceBlocks] >> test.py::test[join-join_comp_map_table--Results] [GOOD] >> test.py::test[join-join_key_cmp_udf--Results] >> test.py::test[optimizers-yql-2582_limit_for_join_input--ForceBlocks] >> test.py::test[select-create_structures-default.txt-Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted-Results] [GOOD] >> test.py::test[window-generic/aggregations_before_current--ForceBlocks] [GOOD] >> test.py::test[window-generic/aggregations_before_current--Results] >> test.py::test[select-dot_in_alias-default.txt-ForceBlocks] >> test.py::test[agg_apply-table--Results] [GOOD] >> test.py::test[aggr_factory-bottom_by-default.txt-ForceBlocks] >> test.py::test[insert-override-from_sorted-ForceBlocks] >> test.py::test[blocks-exists--Results] [GOOD] >> test.py::test[blocks-mul_uint64_opt2--ForceBlocks] >> test.py::test[produce-process_streaming_count-default.txt-Results] [GOOD] >> test.py::test[produce-process_trivial_as_struct-default.txt-ForceBlocks] |85.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[ypath-limit_with_range-default.txt-Results] [GOOD] >> test.py::test[hor_join-out_max_outtables-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-out_max_outtables-default.txt-Results] >> test.py::test[view-trivial_view--ForceBlocks] [GOOD] >> test.py::test[view-trivial_view--Results] >> test.py::test[join-bush_in_in--ForceBlocks] [GOOD] >> test.py::test[join-bush_in_in--Results] >> test.py::test[tpch-q3-default.txt-Results] [GOOD] >> test.py::test[union_all-mix_map_and_project-trivial_map-ForceBlocks] >> test.py::test[join-lookupjoin_semi_1o-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_1o-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_struct-off-ForceBlocks] >> test.py::test[select-unlabeled_1000--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_expr_mul_col--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_with_join--ForceBlocks] >> test.py::test[limit-limit-dynamic-ForceBlocks] [GOOD] >> test.py::test[limit-limit-dynamic-Results] >> test.py::test[order_by-order_with_null-default.txt-Results] [GOOD] >> test.py::test[order_by-sort--ForceBlocks] >> test.py::test[sampling-direct_read-dynamic-Results] [GOOD] >> 
test.py::test[schema-select_operate_with_columns_simple-default.txt-ForceBlocks] >> test.py::test[join-full_trivial_udf_call--Results] [GOOD] >> test.py::test[join-full_trivial_udf_call-off-ForceBlocks] >> test.py::test[blocks-mod_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-pg_from_dates--Results] >> test.py::test[distinct-distinct_join-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_union_all-default.txt-ForceBlocks] >> test.py::test[window-row_number_to_map-default.txt-Results] [GOOD] >> test.py::test[sampling-mapjoin_left_sample-default.txt-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort2-ForceBlocks] >> test.py::test[window-row_number_to_map_noncompact-default.txt-ForceBlocks] >> test.py::test[key_filter-convert--Results] >> test.py::test[pg-aggregate_minus_zero--Results] [GOOD] >> test.py::test[pg-select_columnref2-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--Results] >> test.py::test[view-trivial_view--Results] [GOOD] >> test.py::test[weak_field-weak_field_opt--ForceBlocks] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt-Results] >> test.py::test[hor_join-out_max_outtables-default.txt-Results] [GOOD] >> test.py::test[in-in_noansi_join--ForceBlocks] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_compact--Results] [SKIPPED] >> test.py::test[aggregate-group_by_mul_gs_gs--Results] >> test.py::test[aggr_factory-mode-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-mode-default.txt-Results] |85.6%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part5/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[json-jsondocument/insert--ForceBlocks] [GOOD] >> test.py::test[json-jsondocument/insert--Results] >> test.py::test[limit-limit-dynamic-Results] [GOOD] >> test.py::test[limit-limit_over_sort_desc_in_subquery--ForceBlocks] >> test.py::test[blocks-combine_all_minmax_nested--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_minmax_nested--Results] >> test.py::test[column_order-select_groupby_with_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_groupby_with_star-default.txt-Results] >> test.py::test[blocks-div_uint64--ForceBlocks] [GOOD] >> test.py::test[blocks-div_uint64--Results] >> test.py::test[aggregate-group_by_mul_gs_gs--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_gs--Results] >> test.py::test[join-inner_on_key_only-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_on_key_only-off-Results] [SKIPPED] >> test.py::test[join-join_and_distinct_key--ForceBlocks] >> test.py::test[join-left_join_null_column-off-ForceBlocks] >> test.py::test[weak_field-weak_field_data--Results] [GOOD] >> test.py::test[produce-process_pure_with_sort-default.txt-ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_join--Results] >> test.py::test[produce-process_pure_with_sort-default.txt-Results] >> test.py::test[window-generic/aggregations_before_current--Results] [GOOD] >> test.py::test[window-win_by_all_aggregate--ForceBlocks] >> test.py::test[join-bush_in_in--Results] [GOOD] >> test.py::test[join-grace_join1-map-ForceBlocks] >> test.py::test[produce-process_trivial_as_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_trivial_as_struct-default.txt-Results] >> test.py::test[json-jsondocument/insert--Results] [GOOD] >> test.py::test[key_filter-dependent_value-default.txt-ForceBlocks] >> test.py::test[in-in_with_table_of_tuples-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_with_table_of_tuples-default.txt-Results] >> test.py::test[json-json_value/example--ForceBlocks] [GOOD] >> test.py::test[json-json_value/example--Results] >> test.py::test[select-dot_in_alias-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dot_in_alias-default.txt-Results] >> test.py::test[distinct-distinct_count_and_avg-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Results] >> test.py::test[aggregate-group_by_rollup_duo--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_duo--Results] >> test.py::test[insert-override-from_sorted-ForceBlocks] [GOOD] >> test.py::test[blocks-mul_uint64_opt2--ForceBlocks] [GOOD] >> test.py::test[insert-override-from_sorted-Results] >> test.py::test[blocks-mul_uint64_opt2--Results] >> test.py::test[join-pullup_extend--Results] >> test.py::test[window-win_multiaggr-default.txt-Results] [GOOD] >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt-Results] >> test.py::test[blocks-div_uint64--Results] [GOOD] >> test.py::test[blocks-mod_uint64_opt2--ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-off-ForceBlocks] >> 
test.py::test[blocks-combine_all_minmax_nested--Results] [GOOD] >> test.py::test[blocks-combine_hashed_sum--ForceBlocks] >> test.py::test[union_all-mix_map_and_project-trivial_map-ForceBlocks] [GOOD] >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] >> test.py::test[join-mapjoin_with_empty_struct-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct-off-Results] [SKIPPED] >> test.py::test[join-opt_on_opt_side_with_group--ForceBlocks] >> test.py::test[blocks-pg_from_dates--Results] [GOOD] >> test.py::test[blocks-sort_one_desc--Results] >> test.py::test[schema-select_all-row_spec_diff_sort2-ForceBlocks] [GOOD] >> test.py::test[key_filter-convert--Results] [GOOD] >> test.py::test[key_filter-empty_range--ForceBlocks] >> test.py::test[schema-select_all-row_spec_diff_sort2-Results] >> test.py::test[join-mergejoin_semi_to_inner--ForceBlocks] >> test.py::test[produce-process_trivial_as_struct-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-ForceBlocks] >> test.py::test[produce-process_pure_with_sort-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_with_join--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_with_join--Results] >> test.py::test[produce-fuse_reduces_with_presort--ForceBlocks] [GOOD] >> test.py::test[produce-fuse_reduces_with_presort--Results] >> test.py::test[order_by-sort--ForceBlocks] [GOOD] >> test.py::test[order_by-sort--Results] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-Results] >> test.py::test[produce-process_with_python-default.txt-ForceBlocks] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--ForceBlocks] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--Results] >> test.py::test[insert-override-from_sorted-Results] [GOOD] >> test.py::test[insert-override-proto-ForceBlocks] >> test.py::test[join-join_key_cmp_udf--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access-off-Results] [SKIPPED] >> test.py::test[window-row_number_no_part_multi_input-default.txt-Results] [GOOD] >> test.py::test[window-row_number_to_map-default.txt-Results] >> test.py::test[distinct-distinct_union_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_union_all-default.txt-Results] >> test.py::test[blocks-mul_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-not_opt--ForceBlocks] >> test.py::test[pg-select_columnref2-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_columnref2-default.txt-Results] >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-ForceBlocks] >> test.py::test[select-dot_in_alias-default.txt-Results] [GOOD] >> test.py::test[select-missing_with_nonpersist--ForceBlocks] [SKIPPED] >> test.py::test[select-missing_with_nonpersist--Results] [SKIPPED] >> test.py::test[select-substring-default.txt-ForceBlocks] >> test.py::test[aggr_factory-mode-default.txt-Results] [GOOD] >> test.py::test[aggregate-GroupByTwoFields--ForceBlocks] >> test.py::test[aggr_factory-bottom_by-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[aggr_factory-bottom_by-default.txt-Results] >> test.py::test[weak_field-weak_field_opt--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_opt--Results] >> test.py::test[window-row_number_to_map_noncompact-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-row_number_to_map_noncompact-default.txt-Results] >> test.py::test[schema-select_all-row_spec_diff_sort2-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--ForceBlocks] >> test.py::test[join-full_trivial_udf_call-off-ForceBlocks] [GOOD] >> test.py::test[join-full_trivial_udf_call-off-Results] [SKIPPED] >> test.py::test[join-grace_join1--ForceBlocks] >> test.py::test[column_order-select_groupby_with_star-default.txt-Results] [GOOD] >> test.py::test[column_order-select_limit_offset-default.txt-ForceBlocks] >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] [GOOD] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-ForceBlocks] >> test.py::test[in-in_with_table_of_tuples-default.txt-Results] [GOOD] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-ForceBlocks] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-Results] [GOOD] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt-ForceBlocks] >> test.py::test[json-json_value/example--Results] [GOOD] >> test.py::test[key_filter-between_with_key_filter--ForceBlocks] >> test.py::test[bigdate-table_yt_key_filter-on-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-on-Results] [SKIPPED] >> test.py::test[binding-table_concat_strict_binding-default.txt-ForceBlocks] >> test.py::test[order_by-sort--Results] [GOOD] >> test.py::test[order_by-union_all--ForceBlocks] >> test.py::test[limit-limit_over_sort_desc_in_subquery--ForceBlocks] [GOOD] >> test.py::test[limit-limit_over_sort_desc_in_subquery--Results] |85.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[join-join_without_correlation_and_struct_access-off-Results] [SKIPPED] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Results] [GOOD] >> test.py::test[optimizers-yql-5833-table_content--ForceBlocks] >> test.py::test[aggregate-group_by_mul_gs_gs--Results] [GOOD] >> test.py::test[distinct-distinct_union_all-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_count_no_gouping-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_opt--Results] [GOOD] >> test.py::test[weak_field-weak_field_rest--ForceBlocks] >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregation_by_udf--Results] >> test.py::test[join-grace_join1-map-ForceBlocks] [GOOD] >> test.py::test[join-grace_join1-map-Results] [SKIPPED] >> test.py::test[join-inner_grouped_by_expr--ForceBlocks] >> test.py::test[key_filter-dependent_value-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-dependent_value-default.txt-Results] >> test.py::test[hor_join-filters--ForceBlocks] >> test.py::test[join-left_join_null_column-off-ForceBlocks] [GOOD] >> test.py::test[join-left_join_null_column-off-Results] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_optional--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_with_select_distinct--ForceBlocks] >> test.py::test[pg-select_columnref2-default.txt-Results] [GOOD] >> 
test.py::test[pg-select_table1-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_expr_with_join--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_only_start--ForceBlocks] >> test.py::test[window-row_number_to_map_noncompact-default.txt-Results] [GOOD] >> test.py::test[window-win_func_rank_by_all--ForceBlocks] |85.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[aggregate-group_by_mul_gs_gs--Results] [GOOD] >> test.py::test[aggr_factory-bottom_by-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-stddev-default.txt-ForceBlocks] >> test.py::test[blocks-mod_uint64_opt2--ForceBlocks] [GOOD] >> test.py::test[blocks-mod_uint64_opt2--Results] >> test.py::test[join-pullup_extend--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted--ForceBlocks] >> test.py::test[join-join_and_distinct_key--ForceBlocks] [GOOD] >> test.py::test[join-join_and_distinct_key--Results] >> test.py::test[blocks-sort_one_desc--Results] [GOOD] >> test.py::test[limit-limit_over_sort_desc_in_subquery--Results] [GOOD] >> test.py::test[lineage-flatten_by--ForceBlocks] >> test.py::test[blocks-combine_hashed_sum--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_sum--Results] >> test.py::test[weak_field-weak_field_join--Results] [GOOD] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-Results] >> test.py::test[key_filter-dependent_value-default.txt-Results] [GOOD] >> test.py::test[key_filter-key_double_opt_suffix--ForceBlocks] [SKIPPED] >> test.py::test[key_filter-key_double_opt_suffix--Results] [SKIPPED] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-ForceBlocks] >> test.py::test[blocks-not_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-not_opt--Results] >> test.py::test[produce-process_with_python-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_python-default.txt-Results] >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group--ForceBlocks] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group--Results] >> test.py::test[join-mergejoin_semi_to_inner--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner--Results] >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_cube_join_count--ForceBlocks] >> test.py::test[select-substring-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-override-proto-ForceBlocks] [GOOD] |85.7%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part16/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[insert-override-proto-Results] >> test.py::test[select-substring-default.txt-Results] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-off-ForceBlocks] |85.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[blocks-sort_one_desc--Results] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--Results] [GOOD] >> test.py::test[window-win_lead_in_mem-default.txt-ForceBlocks] >> test.py::test[blocks-mod_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-not--ForceBlocks] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] |85.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] [GOOD] |85.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[weak_field-weak_field_join--Results] [GOOD] >> test.py::test[join-grace_join1--ForceBlocks] [GOOD] >> test.py::test[join-grace_join1--Results] [SKIPPED] >> test.py::test[join-join_key_cmp_udf-off-ForceBlocks] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-Results] >> test.py::test[aggregate-GroupByTwoFields--ForceBlocks] [GOOD] >> test.py::test[aggregate-GroupByTwoFields--Results] >> test.py::test[produce-process_with_python-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in-sorted-ForceBlocks] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-Results] [GOOD] >> test.py::test[ql_filter-integer_escaping--ForceBlocks] >> test.py::test[column_order-select_limit_offset-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_limit_offset-default.txt-Results] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-Results] >> test.py::test[blocks-not_opt--Results] [GOOD] >> test.py::test[blocks-string_pass--ForceBlocks] >> test.py::test[key_filter-between_with_key_filter--ForceBlocks] [GOOD] >> test.py::test[key_filter-between_with_key_filter--Results] >> test.py::test[insert-override-proto-Results] [GOOD] >> test.py::test[insert-use_anon_table_without_fill_fail--ForceBlocks] >> test.py::test[blocks-combine_hashed_sum--Results] [GOOD] >> test.py::test[blocks-date_greater--ForceBlocks] >> test.py::test[select-substring-default.txt-Results] [GOOD] >> test.py::test[binding-table_concat_strict_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_concat_strict_binding-default.txt-Results] >> test.py::test[pg-select_table1-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-table_content_from_double_opt-default.txt-ForceBlocks] >> test.py::test[pg-select_table1-default.txt-Results] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] [GOOD] >> test.py::test[schema-select_simple-default.txt-ForceBlocks] >> 
test.py::test[select-const_subrequest_and_select_by_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt-Results] >> test.py::test[join-mergejoin_semi_to_inner--Results] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner-off-ForceBlocks] >> test.py::test[join-join_and_distinct_key--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_right--ForceBlocks] >> test.py::test[aggregate-aggregation_by_udf--Results] [GOOD] >> test.py::test[aggregate-avg_and_sum_by_value--Results] >> test.py::test[join-inner_grouped_by_expr--ForceBlocks] [GOOD] >> test.py::test[join-inner_grouped_by_expr--Results] >> test.py::test[hor_join-filters--ForceBlocks] [GOOD] >> test.py::test[hor_join-filters--Results] >> test.py::test[weak_field-weak_field_rest--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_rest--Results] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-Results] [GOOD] >> test.py::test[weak_field-yql-7888_mapfieldsubset--ForceBlocks] >> test.py::test[aggregate-group_by_ru_with_select_distinct--ForceBlocks] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group--Results] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group-off-ForceBlocks] >> test.py::test[optimizers-yql-5833-table_content--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-5833-table_content--Results] >> test.py::test[column_order-select_limit_offset-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-Results] [GOOD] >> test.py::test[table_range-range_over_like--ForceBlocks] >> test.py::test[distinct-distinct_count_no_gouping-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_only_start--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_only_start--Results] [SKIPPED] >> test.py::test[aggregate-GroupByTwoFields--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_alias_on_subexp--ForceBlocks] >> test.py::test[aggregate-group_by_mul_ru_ru--ForceBlocks] >> test.py::test[distinct-distinct_count_no_gouping-default.txt-Results] >> test.py::test[join-left_join_right_pushdown_optional--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_optional--Results] >> test.py::test[aggregate-group_by_mul_gs_gs--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_duo--Results] >> test.py::test[binding-table_concat_strict_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-block_input_mapreduce--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_mapreduce--Results] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-Results] [SKIPPED] >> test.py::test[blocks-combine_hashed_min--ForceBlocks] >> test.py::test[in-in_noansi_join--ForceBlocks] [GOOD] >> test.py::test[in-in_noansi_join--Results] >> test.py::test[pg-select_table1-default.txt-Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted--ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted--Results] >> test.py::test[order_by-union_all--ForceBlocks] [GOOD] >> test.py::test[order_by-union_all--Results] >> test.py::test[key_filter-between_with_key_filter--Results] [GOOD] >> test.py::test[key_filter-contains_optional--ForceBlocks] >> test.py::test[window-win_func_rank_by_all--ForceBlocks] [GOOD] >> test.py::test[window-win_func_rank_by_all--Results] >> test.py::test[insert-use_anon_table_without_fill_fail--ForceBlocks] [GOOD] >> 
test.py::test[insert-use_anon_table_without_fill_fail--Results] [GOOD] >> test.py::test[insert-yql-14538--ForceBlocks] |85.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[column_order-select_limit_offset-default.txt-Results] [GOOD] >> test.py::test[hor_join-filters--Results] [GOOD] >> test.py::test[hor_join-fuse_multi_usage-outlimit-ForceBlocks] [SKIPPED] >> test.py::test[hor_join-fuse_multi_usage-outlimit-Results] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props--Results] [SKIPPED] >> test.py::test[action-action_eval_cluster_table_for--ForceBlocks] >> test.py::test[weak_field-weak_field_rest--Results] [GOOD] >> test.py::test[window-lagging/aggregations_leadlag--ForceBlocks] |85.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[aggregate-group_by_ru_with_select_distinct--ForceBlocks] [GOOD] >> test.py::test[blocks-not--ForceBlocks] [GOOD] >> test.py::test[blocks-not--Results] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt-Results] [GOOD] >> test.py::test[select-deep_udf_call--ForceBlocks] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-Results] |85.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part16/test-results/pytest/{meta.json ... results_accumulator.log} |85.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[pg-select_table1-default.txt-Results] [GOOD] >> test.py::test[ql_filter-integer_escaping--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_escaping--Results] >> test.py::test[join-selfjoin_on_sorted--Results] [GOOD] >> test.py::test[join-star_join_inners_premap--ForceBlocks] >> test.py::test[lineage-flatten_by--ForceBlocks] [GOOD] >> test.py::test[lineage-flatten_by--Results] >> test.py::test[optimizers-yql-5833-table_content--Results] [GOOD] >> test.py::test[order_by-literal_with_assume_desc--ForceBlocks] >> test.py::test[join-inner_grouped_by_expr--Results] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by--ForceBlocks] >> test.py::test[produce-fuse_reduces_with_presort--Results] [GOOD] >> test.py::test[produce-process_multi_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_multi_out--Results] [SKIPPED] >> test.py::test[produce-reduce_subfields-sorted-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_subfields-sorted-Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys_stream--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys_stream--Results] [SKIPPED] >> test.py::test[ql_filter-integer_eval--ForceBlocks] >> test.py::test[distinct-distinct_count_no_gouping-default.txt-Results] [GOOD] >> test.py::test[dq-pool_trees_whitelist--ForceBlocks] >> test.py::test[key_filter-empty_range--ForceBlocks] [GOOD] >> test.py::test[key_filter-empty_range--Results] >> test.py::test[insert-append_with_read_udf_fail--Results] >> test.py::test[window-win_lead_in_mem-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_lead_in_mem-default.txt-Results] >> test.py::test[aggr_factory-stddev-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[aggr_factory-stddev-default.txt-Results] >> test.py::test[order_by-union_all--Results] [GOOD] >> test.py::test[order_by-yql-19598--ForceBlocks] |85.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[hor_join-yql-12610_old_table_props--Results] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_optional--Results] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst-off-ForceBlocks] >> test.py::test[blocks-not--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_partial_uniq_keys-off-ForceBlocks] >> test.py::test[blocks-string_pass--ForceBlocks] [GOOD] >> test.py::test[blocks-string_pass--Results] >> test.py::test[window-win_func_rank_by_all--Results] [GOOD] >> test.py::test[window-win_multiaggr-default.txt-ForceBlocks] >> test.py::test[join-join_key_cmp_udf-off-ForceBlocks] [GOOD] >> test.py::test[join-join_key_cmp_udf-off-Results] [SKIPPED] >> test.py::test[join-left_all-off-ForceBlocks] >> test.py::test[schema-select_simple-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-select_simple-default.txt-Results] >> test.py::test[window-win_by_all_aggregate--ForceBlocks] [GOOD] >> test.py::test[window-win_by_all_aggregate--Results] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] [GOOD] >> test.py::test[ql_filter-integer_escaping--Results] [GOOD] >> test.py::test[ql_filter-integer_many_noskiff--ForceBlocks] |85.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[blocks-not--Results] [GOOD] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-Results] [GOOD] >> test.py::test[insert-insert_from_other--ForceBlocks] >> test.py::test[type_v3-insert_struct_v3_wo_native--Results] >> test.py::test[join-mergejoin_semi_to_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner-off-Results] >> test.py::test[aggregate-group_by_cube_join_count--ForceBlocks] [GOOD] >> test.py::test[lineage-flatten_by--Results] [GOOD] >> test.py::test[lineage-grouping_sets--ForceBlocks] >> test.py::test[join-mergejoin_semi_to_inner-off-Results] [SKIPPED] >> test.py::test[join-nested_semi_join--ForceBlocks] >> test.py::test[produce-reduce_multi_in-sorted-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in-sorted-Results] >> test.py::test[aggregate-group_by_cube_join_count--Results] >> test.py::test[join-opt_on_opt_side_with_group-off-ForceBlocks] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_with_remap-off-ForceBlocks] >> test.py::test[window-win_lead_in_mem-default.txt-Results] [GOOD] |85.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] [GOOD] >> test.py::test[window-row_number_to_map-default.txt-Results] [GOOD] >> test.py::test[window-win_func_aggr_stat--Results] >> test.py::test[insert-append_with_read_udf_fail--Results] [GOOD] >> test.py::test[insert-drop_sortness--ForceBlocks] >> test.py::test[aggregate-group_by_expr_alias_on_subexp--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_alias_on_subexp--Results] >> test.py::test[select-table_content_from_double_opt-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[select-table_content_from_double_opt-default.txt-Results] >> test.py::test[join-left_join_right_pushdown_nested_right--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_right--Results] >> test.py::test[blocks-string_pass--Results] [GOOD] >> test.py::test[pg-tpcds-q89-default.txt-Results] >> test.py::test[blocks-combine_hashed_min--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_min--Results] >> test.py::test[table_range-range_over_like--ForceBlocks] [GOOD] >> test.py::test[table_range-range_over_like--Results] >> test.py::test[schema-select_simple-default.txt-Results] [GOOD] >> test.py::test[schema-skip_complex_type--ForceBlocks] >> test.py::test[key_filter-contains_optional--ForceBlocks] [GOOD] >> test.py::test[key_filter-contains_optional--Results] >> test.py::test[join-strict_keys--Results] |85.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[blocks-string_pass--Results] [GOOD] |85.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[window-win_lead_in_mem-default.txt-Results] [GOOD] >> test.py::test[order_by-literal_with_assume_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_with_assume_desc--Results] >> test.py::test[select-deep_udf_call--ForceBlocks] [GOOD] >> test.py::test[select-deep_udf_call--Results] >> test.py::test[ql_filter-integer_eval--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_eval--Results] >> test.py::test[insert-yql-14538--ForceBlocks] [GOOD] >> test.py::test[insert-yql-14538--Results] >> test.py::test[action-action_eval_cluster_table_for--ForceBlocks] [GOOD] >> test.py::test[action-action_eval_cluster_table_for--Results] >> test.py::test[aggr_factory-stddev-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-top-default.txt-ForceBlocks] >> test.py::test[table_range-range_over_like--Results] [GOOD] >> test.py::test[select-table_content_from_double_opt-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-ForceBlocks] >> test.py::test[weak_field-yql-7888_mapfieldsubset--ForceBlocks] [GOOD] >> test.py::test[weak_field-yql-7888_mapfieldsubset--Results] >> test.py::test[aggregate-avg_and_sum_by_value--Results] [GOOD] >> test.py::test[aggregate-error_type--Results] >> test.py::test[produce-reduce_multi_in-sorted-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype_assume--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_multi_in_difftype_assume--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_sampling--ForceBlocks] >> test.py::test[aggregate-group_by_expr_alias_on_subexp--Results] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by--ForceBlocks] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by--Results] >> test.py::test[aggregate-group_by_rollup_grouping_hum--ForceBlocks] >> test.py::test[order_by-yql-19598--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_min--Results] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_double--ForceBlocks] >> test.py::test[dq-pool_trees_whitelist--ForceBlocks] [GOOD] >> test.py::test[dq-pool_trees_whitelist--Results] [SKIPPED] >> test.py::test[epochs-read_modified--ForceBlocks] >> test.py::test[key_filter-empty_range--Results] [GOOD] >> test.py::test[limit-dynamic_limit--ForceBlocks] [SKIPPED] >> test.py::test[limit-dynamic_limit--Results] [SKIPPED] >> 
test.py::test[lineage-group_by_asstruct_key-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-group_by_asstruct_key-default.txt-Results] [SKIPPED] >> test.py::test[lineage-member_over_if_struct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-member_over_if_struct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-process-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-process-default.txt-Results] [SKIPPED] >> test.py::test[lineage-reduce-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-reduce-default.txt-Results] [SKIPPED] >> test.py::test[lineage-scalar_context--ForceBlocks] [SKIPPED] >> test.py::test[order_by-yql-19598--Results] >> test.py::test[lineage-scalar_context--Results] [SKIPPED] >> test.py::test[lineage-select_all_filter-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_all_filter-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_window_no_payloads--ForceBlocks] >> test.py::test[key_filter-contains_optional--Results] [GOOD] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-ForceBlocks] >> test.py::test[join-left_all-off-ForceBlocks] [GOOD] >> test.py::test[join-left_all-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner--ForceBlocks] >> test.py::test[join-star_join_inners_premap--ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners_premap--Results] |85.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[table_range-range_over_like--Results] [GOOD] >> test.py::test[join-strict_keys--Results] [GOOD] >> test.py::test[join-yql-4275--ForceBlocks] >> test.py::test[order_by-literal_with_assume_desc--Results] [GOOD] >> test.py::test[order_by-order_by_expr--ForceBlocks] >> test.py::test[hor_join-sorted_out--ForceBlocks] >> test.py::test[select-deep_udf_call--Results] [GOOD] >> test.py::test[ql_filter-integer_eval--Results] [GOOD] >> test.py::test[ql_filter-integer_members_eval--ForceBlocks] >> test.py::test[pg-tpcds-q89-default.txt-Results] [GOOD] >> test.py::test[insert-yql-14538--Results] [GOOD] >> test.py::test[action-action_eval_cluster_table_for--Results] [GOOD] >> test.py::test[action-action_nested_query-default.txt-ForceBlocks] >> test.py::test[ql_filter-integer_many_noskiff--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_many_noskiff--Results] >> test.py::test[join-left_join_right_pushdown_nested_right--Results] [GOOD] >> test.py::test[join-left_only_semi_and_other-off-ForceBlocks] >> test.py::test[produce-process_with_python_stream--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_with_python_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_all_opt-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_all_opt-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_with_presort_diff_order--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_presort_diff_order--Results] [SKIPPED] >> test.py::test[sampling-read--ForceBlocks] >> test.py::test[insert_monotonic-break_sort_fail--ForceBlocks] >> test.py::test[join-mapjoin_partial_uniq_keys-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align2--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_align2--Results] [SKIPPED] >> test.py::test[window-win_multiaggr-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_multiaggr-default.txt-Results] >> test.py::test[type_v3-append_diff_layout1--Results] 
>> test.py::test[aggregate-error_type--Results] [GOOD] >> test.py::test[order_by-yql-19598--Results] [GOOD] >> test.py::test[pg-pg_types_orderby--ForceBlocks] >> test.py::test[weak_field-yql-7888_mapfieldsubset--Results] [GOOD] >> test.py::test[window-full/syscolumns--ForceBlocks] >> test.py::test[insert-insert_from_other--ForceBlocks] [GOOD] >> test.py::test[insert-insert_from_other--Results] >> test.py::test[join-lookupjoin_bug7646_subst-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o2o--ForceBlocks] >> test.py::test[insert-drop_sortness--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr--Results] >> test.py::test[join-join_semi_correlation_in_order_by--Results] [GOOD] >> test.py::test[window-lagging/aggregations_leadlag--ForceBlocks] [GOOD] >> test.py::test[window-lagging/aggregations_leadlag--Results] >> test.py::test[insert-drop_sortness--Results] >> test.py::test[aggregate-group_by_cube_join_count--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_semi_join--ForceBlocks] >> test.py::test[join-lookupjoin_inner_2o-off-ForceBlocks] |85.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[select-deep_udf_call--Results] [GOOD] >> test.py::test[join-nested_semi_join--ForceBlocks] [GOOD] >> test.py::test[join-nested_semi_join--Results] >> test.py::test[blocks-date_greater--ForceBlocks] [GOOD] >> test.py::test[blocks-date_greater--Results] >> test.py::test[aggregate-group_by_rollup_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join--Results] >> test.py::test[pg-tpch-q19-default.txt-Results] >> test.py::test[select-reuse_named_node-default.txt-ForceBlocks] |85.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[join-mergejoin_force_align2--Results] [SKIPPED] >> test.py::test[type_v3-insert_struct_v3_wo_native--Results] [GOOD] >> test.py::test[type_v3-type_subset--ForceBlocks] [SKIPPED] >> test.py::test[type_v3-type_subset--Results] [SKIPPED] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-ForceBlocks] >> test.py::test[schema-skip_complex_type--ForceBlocks] [GOOD] >> test.py::test[schema-skip_complex_type--Results] >> test.py::test[ql_filter-integer_many_noskiff--Results] [GOOD] >> test.py::test[window-win_by_all_aggregate--Results] [GOOD] >> test.py::test[window-win_func_aggr_hist--ForceBlocks] >> test.py::test[insert_monotonic-break_sort_fail--ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-break_sort_fail--Results] [GOOD] >> test.py::test[lineage-grouping_sets--ForceBlocks] [GOOD] >> test.py::test[lineage-grouping_sets--Results] >> test.py::test[insert-insert_from_other--Results] [GOOD] >> test.py::test[insert-override-with_read_udf-ForceBlocks] >> test.py::test[insert_monotonic-keep_unique--ForceBlocks] [SKIPPED] >> test.py::test[insert_monotonic-keep_unique--Results] [SKIPPED] >> test.py::test[join-bush_dis_in_in_in--ForceBlocks] >> test.py::test[insert-drop_sortness--Results] [GOOD] >> test.py::test[insert_monotonic-several1-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_rollup_grouping--ForceBlocks] >> test.py::test[join-premap_merge_with_remap-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_with_remap-off-Results] [SKIPPED] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-ForceBlocks] >> test.py::test[window-win_multiaggr-default.txt-Results] [GOOD] >> 
test.py::test[ypath-complex-default.txt-ForceBlocks] >> test.py::test[join-star_join_inners_premap--Results] [GOOD] >> test.py::test[join-star_join_inners_premap-off-ForceBlocks] >> test.py::test[window-win_func_aggr_stat--Results] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--Results] >> test.py::test[type_v3-decimal_yt_nollvm--Results] >> test.py::test[schema-skip_complex_type--Results] [GOOD] >> test.py::test[select-append_to_value--ForceBlocks] |85.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[ql_filter-integer_many_noskiff--Results] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_double--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_double--Results] >> test.py::test[join-lookupjoin_inner--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner--Results] >> test.py::test[ql_filter-integer_members_eval--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_members_eval--Results] >> test.py::test[optimizers-unused_columns_window_no_payloads--ForceBlocks] [GOOD] >> test.py::test[optimizers-unused_columns_window_no_payloads--Results] >> test.py::test[aggregate-group_by_mul_ru_ru--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_mul_ru_ru--Results] >> test.py::test[action-eval_input_output_table_subquery--ForceBlocks] >> test.py::test[type_v3-append_diff_layout1--Results] [GOOD] >> test.py::test[udf-udaf_distinct--ForceBlocks] >> test.py::test[window-lagging/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part_sorted--ForceBlocks] >> test.py::test[join-nested_semi_join--Results] [GOOD] >> test.py::test[join-premap_common_inner_both_sides-off-ForceBlocks] >> test.py::test[join-yql-4275--ForceBlocks] [GOOD] >> test.py::test[join-yql-4275--Results] >> test.py::test[action-action_nested_query-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-action_nested_query-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Results] >> test.py::test[hor_join-sorted_out--ForceBlocks] [GOOD] >> test.py::test[hor_join-sorted_out--Results] >> test.py::test[produce-reduce_multi_in_sampling--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling--Results] >> test.py::test[column_group-hint_diff_grp_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail--Results] [SKIPPED] >> test.py::test[column_group-publish-single-ForceBlocks] [SKIPPED] >> test.py::test[column_group-publish-single-Results] [SKIPPED] >> test.py::test[column_group-respull--ForceBlocks] >> test.py::test[ql_filter-integer_members_eval--Results] [GOOD] >> test.py::test[ql_filter-integer_single--ForceBlocks] >> test.py::test[column_group-respull--ForceBlocks] [SKIPPED] >> test.py::test[column_group-respull--Results] [SKIPPED] >> test.py::test[column_order-insert_reorder_without_columnorder--ForceBlocks] >> test.py::test[optimizers-unused_columns_window_no_payloads--Results] [GOOD] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--Results] >> test.py::test[order_by-order_by_expr--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_expr--Results] >> test.py::test[join-left_only_semi_and_other-off-ForceBlocks] [GOOD] >> 
test.py::test[join-left_only_semi_and_other-off-Results] [SKIPPED] >> test.py::test[sampling-read--ForceBlocks] [GOOD] >> test.py::test[sampling-read--Results] >> test.py::test[lineage-grouping_sets--Results] [GOOD] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-Results] >> test.py::test[lineage-union_all_tablerow-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-union_all_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-aggregate_over_aggregate--ForceBlocks] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--Results] [SKIPPED] >> test.py::test[order_by-literal_empty_list_sort--ForceBlocks] >> test.py::test[blocks-combine_hashed_minmax_double--Results] [GOOD] >> test.py::test[blocks-combine_hashed_set--ForceBlocks] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Results] >> test.py::test[aggr_factory-top-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-top-default.txt-Results] >> test.py::test[aggregate-group_by_rollup_grouping_hum--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum--Results] >> test.py::test[join-lookupjoin_inner_1o2o--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o--Results] >> test.py::test[select-reuse_named_node-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-reuse_named_node-default.txt-Results] >> test.py::test[join-lookupjoin_inner_2o-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_2o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_empty_subq--ForceBlocks] >> test.py::test[epochs-read_modified--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o2o--ForceBlocks] >> test.py::test[action-action_nested_query-default.txt-Results] [GOOD] >> test.py::test[action-insert_each_from_folder--ForceBlocks] >> test.py::test[epochs-read_modified--Results] >> test.py::test[aggregate-group_by_expr--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_only_join--Results] >> test.py::test[aggregate-group_by_expr_semi_join--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_semi_join--Results] >> test.py::test[type_v3-decimal_yt_nollvm--Results] [GOOD] >> test.py::test[view-view_with_library--ForceBlocks] |85.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[join-left_only_semi_and_other-off-Results] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q19-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_lambda-default.txt-ForceBlocks] >> test.py::test[insert_monotonic-several1-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-several1-default.txt-Results] >> test.py::test[insert-override-with_read_udf-ForceBlocks] [GOOD] >> test.py::test[insert-override-with_read_udf-Results] >> test.py::test[hor_join-sorted_out--Results] [GOOD] >> test.py::test[in-in_exists_immediate_nested_subq--ForceBlocks] [SKIPPED] >> test.py::test[in-in_exists_immediate_nested_subq--Results] [SKIPPED] >> test.py::test[insert-append-with_view-ForceBlocks] [SKIPPED] >> 
test.py::test[insert-append-with_view-Results] [SKIPPED] >> test.py::test[insert-select_after_replace_unwrap-default.txt-ForceBlocks] >> test.py::test[order_by-native_desc_publish--ForceBlocks] >> test.py::test[join-yql-4275--Results] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Results] [GOOD] >> test.py::test[view-view_with_lambda--ForceBlocks] >> test.py::test[order_by-native_desc_publish--ForceBlocks] [SKIPPED] >> test.py::test[order_by-native_desc_publish--Results] [SKIPPED] >> test.py::test[pg-select_common_type_unionall--ForceBlocks] >> test.py::test[sampling-read--Results] [GOOD] >> test.py::test[sampling-reduce--ForceBlocks] [SKIPPED] >> test.py::test[sampling-reduce--Results] [SKIPPED] >> test.py::test[sampling-subquery_multiple_sample-default.txt-ForceBlocks] >> test.py::test[key_filter-dict_contains_optional--ForceBlocks] >> test.py::test[blocks-date_greater--Results] [GOOD] >> test.py::test[blocks-date_less_scalar--ForceBlocks] >> test.py::test[select-append_to_value--ForceBlocks] [GOOD] >> test.py::test[select-append_to_value--Results] >> test.py::test[in-in_noansi_join--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-ForceBlocks] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-Results] [SKIPPED] >> test.py::test[join-star_join_semionly--ForceBlocks] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-Results] [GOOD] >> test.py::test[key_filter-is_null_multi_key--ForceBlocks] >> test.py::test[order_by-order_by_expr--Results] [GOOD] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-ForceBlocks] >> test.py::test[select-reuse_named_node-default.txt-Results] [GOOD] >> test.py::test[select-sampleselect-1000-ForceBlocks] >> test.py::test[join-lookupjoin_inner_1o2o--Results] [GOOD] >> test.py::test[epochs-read_modified--Results] [GOOD] >> test.py::test[expr-non_persistable_group_by_column_fail--ForceBlocks] >> test.py::test[ypath-complex-default.txt-ForceBlocks] [GOOD] >> test.py::test[ypath-complex-default.txt-Results] >> test.py::test[produce-reduce_multi_in_sampling--Results] [GOOD] >> test.py::test[ql_filter-integer_bounds--ForceBlocks] >> test.py::test[join-mapjoin_early_rewrite-off-ForceBlocks] >> test.py::test[join-bush_dis_in_in_in--ForceBlocks] [GOOD] |85.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[in-in_noansi_join--Results] [GOOD] >> test.py::test[insert_monotonic-several1-default.txt-Results] [GOOD] >> test.py::test[join-alias_where_group-off-ForceBlocks] >> test.py::test[insert-override-with_read_udf-Results] [GOOD] >> test.py::test[insert-udf_empty--ForceBlocks] >> test.py::test[pg-pg_types_orderby--ForceBlocks] [GOOD] >> test.py::test[pg-pg_types_orderby--Results] [SKIPPED] >> test.py::test[blocks-filter_partial_expr--ForceBlocks] >> test.py::test[pg-tpcds-q26-default.txt-ForceBlocks] >> test.py::test[select-append_to_value--Results] [GOOD] >> test.py::test[select-boolean_where--ForceBlocks] >> test.py::test[action-eval_atom_wrong_type_expr--ForceBlocks] [SKIPPED] >> test.py::test[action-eval_atom_wrong_type_expr--Results] [SKIPPED] >> test.py::test[action-eval_on_modif_table_fail--ForceBlocks] >> test.py::test[window-win_func_aggr_hist--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_hist--Results] >> test.py::test[join-premap_common_inner_both_sides-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner_both_sides-off-Results] [SKIPPED] >> 
test.py::test[join-premap_map_cross--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_grouping_hum--Results] [GOOD] >> test.py::test[ansi_idents-join_using-default.txt-ForceBlocks] >> test.py::test[window-full/syscolumns--ForceBlocks] [GOOD] >> test.py::test[window-full/syscolumns--Results] >> test.py::test[ql_filter-integer_single--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_single--Results] |85.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[join-bush_dis_in_in_in--ForceBlocks] [GOOD] >> test.py::test[action-eval_input_output_table_subquery--ForceBlocks] [GOOD] >> test.py::test[action-eval_input_output_table_subquery--Results] >> test.py::test[join-star_join_inners_premap-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners_premap-off-Results] [SKIPPED] >> test.py::test[column_order-insert_reorder_without_columnorder--ForceBlocks] [GOOD] >> test.py::test[column_order-insert_reorder_without_columnorder--Results] >> test.py::test[udf-udaf_distinct--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_empty_list_sort--ForceBlocks] [GOOD] >> test.py::test[udf-udaf_distinct--Results] >> test.py::test[order_by-literal_empty_list_sort--Results] >> test.py::test[aggregate-group_by_expr_semi_join--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-ForceBlocks] >> test.py::test[aggr_factory-top-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-ForceBlocks] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-ForceBlocks] >> test.py::test[blocks-combine_hashed_set--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_set--Results] >> test.py::test[ypath-complex-default.txt-Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part_sorted--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part_sorted--Results] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--Results] [GOOD] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Results] >> test.py::test[expr-non_persistable_group_by_column_fail--ForceBlocks] [GOOD] >> test.py::test[expr-non_persistable_group_by_column_fail--Results] [GOOD] >> test.py::test[flatten_by-flatten_one_field--ForceBlocks] >> test.py::test[optimizers-aggregate_over_aggregate--ForceBlocks] [GOOD] >> test.py::test[optimizers-aggregate_over_aggregate--Results] >> test.py::test[aggregate-group_by_rollup_grouping--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping--Results] >> test.py::test[aggregate-group_by_ru_join--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted--Results] >> test.py::test[join-inner_all_right--ForceBlocks] >> test.py::test[join-lookupjoin_semi_1o2o--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_1o2o--Results] >> test.py::test[join-lookupjoin_inner_empty_subq--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq--Results] >> test.py::test[ql_filter-integer_single--Results] [GOOD] >> test.py::test[sampling-bind_default-default.txt-ForceBlocks] >> test.py::test[order_by-literal_empty_list_sort--Results] [GOOD] >> test.py::test[order_by-native_desc_sort--ForceBlocks] >> test.py::test[action-eval_on_modif_table_fail--ForceBlocks] [GOOD] >> test.py::test[action-eval_on_modif_table_fail--Results] [GOOD] >> test.py::test[aggr_factory-count-default.txt-ForceBlocks] >> test.py::test[column_order-insert_reorder_without_columnorder--Results] [GOOD] >> 
test.py::test[column_order-select_limit_offset_reorder-default.txt-ForceBlocks] >> test.py::test[action-eval_input_output_table_subquery--Results] [GOOD] >> test.py::test[action-eval_like--ForceBlocks] >> test.py::test[order_by-native_desc_sort--ForceBlocks] [SKIPPED] >> test.py::test[order_by-native_desc_sort--Results] [SKIPPED] >> test.py::test[produce-process_with_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_lambda-default.txt-Results] |85.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[ypath-complex-default.txt-Results] [GOOD] >> test.py::test[view-view_with_library--ForceBlocks] [GOOD] >> test.py::test[view-view_with_library--Results] >> test.py::test[blocks-combine_hashed_set--Results] [GOOD] >> test.py::test[blocks-date_top_sort--ForceBlocks] >> test.py::test[action-insert_each_from_folder--ForceBlocks] [GOOD] >> test.py::test[action-insert_each_from_folder--Results] >> test.py::test[window-win_func_aggr_hist--Results] [GOOD] >> test.py::test[window-win_func_first_last_rev--ForceBlocks] >> test.py::test[aggregate-group_by_mul_ru_ru--Results] [GOOD] >> test.py::test[aggregate-group_by_session--ForceBlocks] >> test.py::test[udf-udaf_distinct--Results] [GOOD] >> test.py::test[key_filter-dict_contains_optional--ForceBlocks] [GOOD] >> test.py::test[key_filter-dict_contains_optional--Results] >> test.py::test[view-file_inner--ForceBlocks] >> test.py::test[view-view_with_lambda--ForceBlocks] [GOOD] >> test.py::test[view-view_with_lambda--Results] >> test.py::test[insert-select_after_replace_unwrap-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-select_after_replace_unwrap-default.txt-Results] >> test.py::test[optimizers-aggregate_over_aggregate--Results] [GOOD] >> test.py::test[optimizers-multi_to_empty_constraint--ForceBlocks] >> test.py::test[join-star_join_semionly--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part_sorted--Results] [GOOD] >> test.py::test[join-star_join_semionly--Results] >> test.py::test[window-win_func_rank_by_opt_all--ForceBlocks] |85.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[order_by-native_desc_sort--Results] [SKIPPED] >> test.py::test[select-sampleselect-1000-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-Results] >> test.py::test[pg-tpcds-q26-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q26-default.txt-Results] >> test.py::test[key_filter-is_null_multi_key--ForceBlocks] [GOOD] >> test.py::test[key_filter-is_null_multi_key--Results] >> test.py::test[ql_filter-integer_bounds--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_bounds--Results] >> test.py::test[select-sampleselect-1000-Results] >> test.py::test[insert-udf_empty--ForceBlocks] [GOOD] >> test.py::test[insert-udf_empty--Results] >> test.py::test[sampling-subquery_multiple_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-subquery_multiple_sample-default.txt-Results] >> test.py::test[join-mapjoin_early_rewrite-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names-off-ForceBlocks] >> test.py::test[join-lookupjoin_inner_empty_subq--Results] [GOOD] >> test.py::test[blocks-filter_partial_expr--ForceBlocks] 
[GOOD] >> test.py::test[blocks-filter_partial_expr--Results] >> test.py::test[join-lookupjoin_semi_1o2o--Results] [GOOD] >> test.py::test[join-lookupjoin_with_cache--ForceBlocks] >> test.py::test[view-view_with_library--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--ForceBlocks] >> test.py::test[join-alias_where_group-off-ForceBlocks] [GOOD] >> test.py::test[join-alias_where_group-off-Results] [SKIPPED] >> test.py::test[join-anyjoin_merge_nodup-off-ForceBlocks] >> test.py::test[produce-process_with_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_python_as_struct-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_expr_only_join--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--Results] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-Results] >> test.py::test[view-view_with_lambda--Results] [GOOD] >> test.py::test[key_filter-dict_contains_optional--Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_aggregation--ForceBlocks] >> test.py::test[insert-select_after_replace_unwrap-default.txt-Results] [GOOD] >> test.py::test[insert-use_anon_table_before_commit_fail--ForceBlocks] >> test.py::test[pg-tpcds-q26-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q44-default.txt-ForceBlocks] >> test.py::test[action-insert_each_from_folder--Results] [GOOD] >> test.py::test[action-subquery-default.txt-ForceBlocks] >> test.py::test[ql_filter-integer_bounds--Results] [GOOD] >> test.py::test[sampling-bind_expr_udf--ForceBlocks] >> test.py::test[select-boolean_where--ForceBlocks] [GOOD] >> test.py::test[select-boolean_where--Results] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Results] [SKIPPED] >> test.py::test[aggregate-group_compact_sorted_distinct--ForceBlocks] >> test.py::test[window-full/syscolumns--Results] [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-ForceBlocks] >> test.py::test[insert-udf_empty--Results] [GOOD] >> test.py::test[insert_monotonic-break_unique_fail--ForceBlocks] [SKIPPED] >> test.py::test[insert_monotonic-break_unique_fail--Results] [SKIPPED] >> test.py::test[join-aggr_diff_order-default.txt-ForceBlocks] >> test.py::test[key_filter-is_null_multi_key--Results] [GOOD] >> test.py::test[key_filter-part_key_over_dynamic--ForceBlocks] >> test.py::test[join-premap_map_cross--ForceBlocks] [GOOD] >> test.py::test[join-premap_map_cross--Results] >> test.py::test[select-sampleselect-1000-Results] [GOOD] >> test.py::test[select-select_concrete_detailed_columns-default.txt-ForceBlocks] >> test.py::test[pg-select_common_type_unionall--ForceBlocks] [GOOD] >> test.py::test[pg-select_common_type_unionall--Results] |85.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[join-lookupjoin_inner_empty_subq--Results] [GOOD] >> test.py::test[sampling-subquery_multiple_sample-default.txt-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort-ForceBlocks] >> test.py::test[ansi_idents-join_using-default.txt-ForceBlocks] [GOOD] >> test.py::test[ansi_idents-join_using-default.txt-Results] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Results] >> test.py::test[blocks-filter_partial_expr--Results] [GOOD] >> 
test.py::test[blocks-pg_from_dates--ForceBlocks] >> test.py::test[flatten_by-flatten_one_field--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_one_field--Results] >> test.py::test[join-inner_all_right--ForceBlocks] [GOOD] >> test.py::test[join-inner_all_right--Results] >> test.py::test[aggregate-group_by_rollup_grouping--Results] [GOOD] >> test.py::test[aggregate-having_distinct_expr--ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Results] >> test.py::test[join-star_join_semionly--Results] [GOOD] >> test.py::test[key_filter-yql-19420--ForceBlocks] >> test.py::test[aggregate-group_compact_sorted--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--Results] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-ForceBlocks] >> test.py::test[sampling-bind_default-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_default-default.txt-Results] >> test.py::test[pg-tpch-q17-default.txt-Results] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-Results] >> test.py::test[select-boolean_where--Results] [GOOD] >> test.py::test[select-create_tuples-default.txt-ForceBlocks] >> test.py::test[insert-use_anon_table_before_commit_fail--ForceBlocks] [GOOD] >> test.py::test[insert-use_anon_table_before_commit_fail--Results] [GOOD] >> test.py::test[insert-yql-13083-existig-ForceBlocks] >> test.py::test[action-eval_like--ForceBlocks] [GOOD] >> test.py::test[action-eval_like--Results] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--ForceBlocks] >> test.py::test[hor_join-skip_yamr--Results] >> test.py::test[join-premap_map_cross--Results] [GOOD] >> test.py::test[join-premap_merge_inner--ForceBlocks] >> test.py::test[view-file_inner--ForceBlocks] [GOOD] >> test.py::test[view-file_inner--Results] >> test.py::test[ansi_idents-join_using-default.txt-Results] [GOOD] >> test.py::test[bigdate-tz_table_yt_key_filter--ForceBlocks] [SKIPPED] >> test.py::test[bigdate-tz_table_yt_key_filter--Results] [SKIPPED] >> test.py::test[blocks-add_int16--ForceBlocks] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Results] [GOOD] >> test.py::test[limit-limit--ForceBlocks] >> test.py::test[optimizers-multi_to_empty_constraint--ForceBlocks] [GOOD] >> test.py::test[optimizers-multi_to_empty_constraint--Results] >> test.py::test[aggr_factory-udaf-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-Results] >> test.py::test[flatten_by-flatten_one_field--Results] [GOOD] >> test.py::test[flatten_by-flatten_one_field_another--ForceBlocks] >> test.py::test[aggr_factory-count-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-count-default.txt-Results] >> test.py::test[join-inner_all_right--Results] [GOOD] >> test.py::test[join-inner_grouped_by_expr-off-ForceBlocks] >> test.py::test[produce-process_with_python_as_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_python_as_struct-default.txt-Results] >> test.py::test[blocks-date_top_sort--ForceBlocks] [GOOD] >> test.py::test[blocks-date_top_sort--Results] >> test.py::test[sampling-bind_default-default.txt-Results] [GOOD] >> 
test.py::test[schema-select_all-row_spec_diff_sort-ForceBlocks] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_star1--ForceBlocks] >> test.py::test[action-eval_like--Results] [GOOD] >> test.py::test[action-nested_subquery--ForceBlocks] >> test.py::test[weak_field-optimize_weak_fields_map--ForceBlocks] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Results] >> test.py::test[window-win_func_first_last_rev--ForceBlocks] [GOOD] >> test.py::test[window-win_func_first_last_rev--Results] >> test.py::test[window-win_func_rank_by_opt_all--ForceBlocks] [GOOD] >> test.py::test[window-win_func_rank_by_opt_all--Results] >> test.py::test[pg-tpcds-q44-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q44-default.txt-Results] >> test.py::test[optimizers-multi_to_empty_constraint--Results] [GOOD] >> test.py::test[optimizers-unused_columns_group--ForceBlocks] >> test.py::test[view-file_inner--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map_combine--ForceBlocks] >> test.py::test[aggregate-group_by_session--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session--Results] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Results] [GOOD] >> test.py::test[window-win_func_in_lib--Results] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_with_cache--ForceBlocks] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Results] >> test.py::test[join-lookupjoin_with_cache--Results] >> test.py::test[optimizers-sort_constraint_in_left--ForceBlocks] >> test.py::test[aggregate-group_by_ru_with_window_func--ForceBlocks] >> test.py::test[select-select_concrete_detailed_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_concrete_detailed_columns-default.txt-Results] >> test.py::test[aggr_factory-count-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-log_histogram-default.txt-ForceBlocks] >> test.py::test[window-row_number_no_part_from_subq-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Results] >> test.py::test[pg-select_common_type_unionall--Results] [GOOD] >> test.py::test[pg-select_subquery2-default.txt-ForceBlocks] >> test.py::test[blocks-pg_from_dates--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_from_dates--Results] >> test.py::test[schema-select_all-row_spec_hide_sort-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort-Results] >> test.py::test[weak_field-weak_field_aggregation--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_aggregation--Results] >> test.py::test[sampling-bind_expr_udf--ForceBlocks] [GOOD] >> test.py::test[sampling-bind_expr_udf--Results] >> test.py::test[join-aggr_diff_order-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-aggr_diff_order-default.txt-Results] >> test.py::test[produce-process_with_python_as_struct-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_lambda_presort_twin_list--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_lambda_presort_twin_list--Results] [SKIPPED] >> test.py::test[sampling-map--ForceBlocks] >> test.py::test[weak_field-optimize_weak_fields_map--Results] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--ForceBlocks] >> test.py::test[key_filter-part_key_over_dynamic--ForceBlocks] [GOOD] >> 
test.py::test[key_filter-part_key_over_dynamic--Results] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Results] >> test.py::test[aggregate-having_distinct_expr--ForceBlocks] [GOOD] >> test.py::test[aggregate-having_distinct_expr--Results] >> test.py::test[aggregate-group_compact_sorted_distinct--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct--Results] >> test.py::test[pg-tpcds-q44-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q53-default.txt-ForceBlocks] >> test.py::test[order_by-native_desc_sort_with_limit--ForceBlocks] [SKIPPED] >> test.py::test[order_by-native_desc_sort_with_limit--Results] [SKIPPED] >> test.py::test[order_by-order_by_list_of_strings--ForceBlocks] >> test.py::test[join-mergejoin_with_different_key_names-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_table_range-off-ForceBlocks] >> test.py::test[action-subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-subquery-default.txt-Results] >> test.py::test[hor_join-skip_yamr--Results] [GOOD] >> test.py::test[insert-select_after_insert_relabeled-default.txt-ForceBlocks] >> test.py::test[aggr_factory-udaf-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_phases_table2-default.txt-ForceBlocks] >> test.py::test[window-win_func_first_last_rev--Results] [GOOD] >> test.py::test[window-win_func_over_group_by--ForceBlocks] >> test.py::test[blocks-date_top_sort--Results] [GOOD] >> test.py::test[blocks-partial_blocks1--ForceBlocks] >> test.py::test[window-win_func_rank_by_opt_all--Results] [GOOD] >> test.py::test[window-win_func_rank_with_order_by_aggr_key--ForceBlocks] >> test.py::test[select-select_concrete_detailed_columns-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_base_fail--ForceBlocks] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Results] [GOOD] >> test.py::test[key_filter-yql-8663-dedup_ranges--ForceBlocks] >> test.py::test[join-anyjoin_merge_nodup-off-ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_merge_nodup-off-Results] [SKIPPED] >> test.py::test[join-bush_dis_in_in-off-ForceBlocks] >> test.py::test[schema-select_all-row_spec_hide_sort-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_range_empty_fail--ForceBlocks] >> test.py::test[key_filter-yql-19420--ForceBlocks] [GOOD] >> test.py::test[key_filter-yql-19420--Results] >> test.py::test[select-create_tuples-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-create_tuples-default.txt-Results] >> test.py::test[blocks-pg_from_dates--Results] [GOOD] >> test.py::test[count-boolean_count--ForceBlocks] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Results] [GOOD] >> test.py::test[window-win_func_aggr_4func--ForceBlocks] >> test.py::test[limit-limit--ForceBlocks] [GOOD] >> test.py::test[limit-limit--Results] >> test.py::test[key_filter-part_key_over_dynamic--Results] [GOOD] >> test.py::test[key_filter-yql-8117-table_key_filter--ForceBlocks] >> test.py::test[weak_field-weak_field_aggregation--Results] [GOOD] >> test.py::test[weak_field-weak_field_long_name--ForceBlocks] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--Results] >> test.py::test[sampling-bind_expr_udf--Results] [GOOD] >> test.py::test[blocks-add_int16--ForceBlocks] [GOOD] >> 
test.py::test[blocks-add_int16--Results] >> test.py::test[aggregate-group_by_session--Results] [GOOD] >> test.py::test[aggregate-group_by_session_distinct_compact--ForceBlocks] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Results] [GOOD] >> test.py::test[join-premap_merge_inner--ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_inner--Results] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Results] >> test.py::test[type_v3-float--ForceBlocks] >> test.py::test[join-lookupjoin_with_cache--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence--ForceBlocks] >> test.py::test[schema-user_schema_mix3--ForceBlocks] >> test.py::test[action-subquery-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-ForceBlocks] >> test.py::test[action-nested_subquery--ForceBlocks] [GOOD] >> test.py::test[action-nested_subquery--Results] >> test.py::test[flatten_by-flatten_one_field_another--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_one_field_another--Results] |85.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[sampling-bind_expr_udf--Results] [GOOD] >> test.py::test[aggregate-having_distinct_expr--Results] [GOOD] >> test.py::test[blocks-combine_all_decimal--ForceBlocks] >> test.py::test[join-aggr_diff_order-default.txt-Results] [GOOD] >> test.py::test[join-cbo_7tables_only_common_join--ForceBlocks] [SKIPPED] >> test.py::test[join-cbo_7tables_only_common_join--Results] [SKIPPED] >> test.py::test[join-from_in_front_join--ForceBlocks] >> test.py::test[join-inner_grouped_by_expr-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_grouped_by_expr-off-Results] [SKIPPED] >> test.py::test[join-left_only_with_other--ForceBlocks] >> test.py::test[aggregate-group_compact_sorted_distinct--Results] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-default-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-default-Results] [SKIPPED] >> test.py::test[binding-table_range_binding-default.txt-ForceBlocks] >> test.py::test[schema-select_all-row_spec_diff_sort-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort-Results] >> test.py::test[distinct-distinct_star1--ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_star1--Results] >> test.py::test[simple_columns-simple_columns_base_fail--ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_base_fail--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-ForceBlocks] >> test.py::test[select-create_tuples-default.txt-Results] [GOOD] >> test.py::test[select-substring_v1-default.txt-ForceBlocks] >> test.py::test[limit-limit--Results] [GOOD] >> test.py::test[lineage-if_struct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-if_struct-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-combinebykey_fields_subset_range--ForceBlocks] >> test.py::test[schema-select_all_inferschema_range_empty_fail--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema_range_empty_fail--Results] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-ForceBlocks] >> test.py::test[blocks-add_int16--Results] [GOOD] >> test.py::test[blocks-block_input_sys_columns--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_sys_columns--Results] [SKIPPED] >> test.py::test[blocks-combine_all_max--ForceBlocks] >> 
test.py::test[optimizers-unused_columns_group--ForceBlocks] [GOOD] >> test.py::test[optimizers-unused_columns_group--Results] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--Results] [GOOD] >> test.py::test[pg-join_using_tables1-default.txt-ForceBlocks] >> test.py::test[key_filter-yql-19420--Results] [GOOD] >> test.py::test[lambda-lambda_udf--ForceBlocks] >> test.py::test[action-nested_subquery--Results] [GOOD] >> test.py::test[aggr_factory-min-default.txt-ForceBlocks] >> test.py::test[weak_field-optimize_weak_fields_map_combine--ForceBlocks] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map_combine--Results] >> test.py::test[join-premap_merge_inner--Results] [GOOD] >> test.py::test[join-premap_no_premap--ForceBlocks] >> test.py::test[pg-tpcds-q53-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q53-default.txt-Results] >> test.py::test[schema-select_all-row_spec_diff_sort-Results] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-ForceBlocks] >> test.py::test[weak_field-weak_field_to_yson--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Results] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_key_check--Results] >> test.py::test[insert-yql-13083-existig-ForceBlocks] [GOOD] >> test.py::test[insert-yql-13083-existig-Results] >> test.py::test[pg-select_subquery2-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_subquery2-default.txt-Results] >> test.py::test[sampling-map--ForceBlocks] [GOOD] >> test.py::test[sampling-map--Results] >> test.py::test[flatten_by-flatten_one_field_another--Results] [GOOD] >> test.py::test[hor_join-max_in_tables--ForceBlocks] >> test.py::test[pg-tpch-q17-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q22-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_list_of_strings--ForceBlocks] [GOOD] >> test.py::test[optimizers-sort_constraint_in_left--ForceBlocks] [GOOD] >> test.py::test[optimizers-sort_constraint_in_left--Results] >> test.py::test[aggregate-group_by_ru_with_window_func--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_with_window_func--Results] >> test.py::test[aggregate-agg_phases_table2-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-agg_phases_table2-default.txt-Results] >> test.py::test[blocks-partial_blocks1--ForceBlocks] [GOOD] >> test.py::test[blocks-partial_blocks1--Results] >> test.py::test[order_by-order_by_list_of_strings--Results] >> test.py::test[join-mergejoin_with_table_range-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_table_range-off-Results] [SKIPPED] >> test.py::test[join-nopushdown_filter_with_depends_on--ForceBlocks] >> test.py::test[distinct-distinct_star1--Results] [GOOD] >> test.py::test[dq-read_cost-default.txt-ForceBlocks] >> test.py::test[window-win_func_in_lib--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm--Results] >> test.py::test[weak_field-weak_field_long_name--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Results] >> test.py::test[aggr_factory-log_histogram-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-log_histogram-default.txt-Results] >> test.py::test[insert-select_after_insert_relabeled-default.txt-ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Results] [GOOD] >> test.py::test[window-full/aggregations_compact--ForceBlocks] >> test.py::test[pg-tpcds-q53-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q57-default.txt-ForceBlocks] >> 
test.py::test[insert-select_after_insert_relabeled-default.txt-Results] >> test.py::test[key_filter-yql-8663-dedup_ranges--ForceBlocks] [GOOD] >> test.py::test[key_filter-yql-8663-dedup_ranges--Results] >> test.py::test[optimizers-unused_columns_group--Results] [GOOD] >> test.py::test[optimizers-yql-6038_direct_row--ForceBlocks] >> test.py::test[join-bush_dis_in_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_dis_in_in-off-Results] >> test.py::test[join-bush_dis_in_in-off-Results] [SKIPPED] >> test.py::test[join-force_merge_join-default.txt-ForceBlocks] >> test.py::test[weak_field-optimize_weak_fields_map_combine--Results] [GOOD] >> test.py::test[weak_field-weak_field_join_where--ForceBlocks] >> test.py::test[sampling-map--Results] [GOOD] >> test.py::test[sampling-reduce-with_premap-ForceBlocks] [SKIPPED] >> test.py::test[sampling-reduce-with_premap-Results] [SKIPPED] >> test.py::test[sampling-yql-14664_deps-default.txt-ForceBlocks] >> test.py::test[window-win_func_rank_with_order_by_aggr_key--ForceBlocks] [GOOD] >> test.py::test[window-win_func_rank_with_order_by_aggr_key--Results] >> test.py::test[insert-yql-13083-existig-Results] [GOOD] >> test.py::test[insert_monotonic-from_empty--ForceBlocks] >> test.py::test[blocks-partial_blocks1--Results] [GOOD] >> test.py::test[blocks-string_len_and_cmp--ForceBlocks] >> test.py::test[type_v3-float--ForceBlocks] [GOOD] >> test.py::test[type_v3-float--Results] >> test.py::test[schema-user_schema_mix3--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_mix3--Results] >> test.py::test[order_by-order_by_list_of_strings--Results] [GOOD] >> test.py::test[pg-insert--ForceBlocks] >> test.py::test[join-mapjoin_early_rewrite_sequence--ForceBlocks] [GOOD] >> test.py::test[key_filter-yql-8117-table_key_filter--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence--Results] >> test.py::test[key_filter-yql-8117-table_key_filter--Results] >> test.py::test[pg-select_subquery2-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q08-default.txt-ForceBlocks] >> test.py::test[count-boolean_count--ForceBlocks] [GOOD] >> test.py::test[count-boolean_count--Results] >> test.py::test[weak_field-weak_field_long_name--Results] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--ForceBlocks] >> test.py::test[optimizers-sort_constraint_in_left--Results] [GOOD] >> test.py::test[optimizers-yql-5978_fill_multi_usage--ForceBlocks] >> test.py::test[select-substring_v1-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-substring_v1-default.txt-Results] >> test.py::test[aggregate-agg_phases_table2-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt-ForceBlocks] >> test.py::test[join-from_in_front_join--ForceBlocks] [GOOD] >> test.py::test[join-from_in_front_join--Results] >> test.py::test[insert-select_after_insert_relabeled-default.txt-Results] [GOOD] >> test.py::test[insert-trivial_literals_multirow-default.txt-ForceBlocks] >> test.py::test[key_filter-yql-8663-dedup_ranges--Results] [GOOD] >> test.py::test[key_filter-yql_5895_or-default.txt-ForceBlocks] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-Results] >> test.py::test[window-win_func_over_group_by--ForceBlocks] [GOOD] >> test.py::test[window-win_func_over_group_by--Results] >> test.py::test[join-left_only_with_other--ForceBlocks] [GOOD] >> test.py::test[join-left_only_with_other--Results] >> 
test.py::test[select-dict_with_few_keys-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-Results] >> test.py::test[lambda-lambda_udf--ForceBlocks] [GOOD] >> test.py::test[lambda-lambda_udf--Results] >> test.py::test[window-win_func_aggr_4func--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_4func--Results] >> test.py::test[pg-join_using_tables1-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-join_using_tables1-default.txt-Results] >> test.py::test[schema-user_schema_mix3--Results] [GOOD] >> test.py::test[select-calculated_values-default.txt-ForceBlocks] >> test.py::test[type_v3-float--Results] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs_reuse_args_fail--ForceBlocks] >> test.py::test[aggr_factory-log_histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-multi--ForceBlocks] >> test.py::test[aggregate-group_by_session_distinct_compact--ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_distinct_compact--Results] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Results] >> test.py::test[optimizers-combinebykey_fields_subset_range--ForceBlocks] [GOOD] >> test.py::test[optimizers-combinebykey_fields_subset_range--Results] >> test.py::test[schema-select_with_map-sorted_desc-ForceBlocks] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-Results] >> test.py::test[aggregate-group_by_ru_with_window_func--Results] [GOOD] >> test.py::test[aggregate-group_by_session_only_distinct--ForceBlocks] >> test.py::test[blocks-combine_all_max--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_max--Results] >> test.py::test[blocks-combine_all_decimal--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_decimal--Results] >> test.py::test[select-substring_v1-default.txt-Results] [GOOD] >> test.py::test[select-trivial_having-default.txt-ForceBlocks] >> test.py::test[key_filter-yql-8117-table_key_filter--Results] [GOOD] >> test.py::test[lambda-lambda_with_tie-default.txt-ForceBlocks] >> test.py::test[window-win_func_rank_with_order_by_aggr_key--Results] [GOOD] >> test.py::test[window-win_multiaggr_library--ForceBlocks] >> test.py::test[join-premap_merge_extrasort2--Results] >> test.py::test[join-mapjoin_early_rewrite_sequence--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-ForceBlocks] >> test.py::test[lambda-lambda_udf--Results] [GOOD] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-ForceBlocks] >> test.py::test[binding-table_range_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_range_binding-default.txt-Results] >> test.py::test[count-boolean_count--Results] [GOOD] >> test.py::test[count-count_nullable--ForceBlocks] >> test.py::test[select-dict_with_few_keys-default.txt-Results] [GOOD] >> test.py::test[select-result_size_limit_with_fill--ForceBlocks] [SKIPPED] >> test.py::test[select-result_size_limit_with_fill--Results] [SKIPPED] >> test.py::test[dq-read_cost-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-read_cost-default.txt-Results] [SKIPPED] >> test.py::test[epochs-reset_sortness_on_append--ForceBlocks] >> test.py::test[join-nopushdown_filter_with_depends_on--ForceBlocks] [GOOD] >> test.py::test[join-nopushdown_filter_with_depends_on--Results] >> test.py::test[aggr_factory-min-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-max_in_tables--ForceBlocks] [GOOD] >> 
test.py::test[aggr_factory-min-default.txt-Results] >> test.py::test[hor_join-max_in_tables--Results] >> test.py::test[join-from_in_front_join--Results] [GOOD] >> test.py::test[join-full_equal_not_null-off-ForceBlocks] >> test.py::test[schema-select_with_map-sorted_desc-Results] [GOOD] >> test.py::test[optimizers-combinebykey_fields_subset_range--Results] [GOOD] >> test.py::test[optimizers-flatmap_with_non_struct_out--ForceBlocks] [SKIPPED] |85.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[select-result_size_limit_with_fill--Results] [SKIPPED] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bitand-default.txt-ForceBlocks] >> test.py::test[optimizers-flatmap_with_non_struct_out--Results] [SKIPPED] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-ForceBlocks] >> test.py::test[udf-named_args_for_script_with_posargs_reuse_args_fail--ForceBlocks] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs_reuse_args_fail--Results] [GOOD] >> test.py::test[udf-udf--ForceBlocks] >> test.py::test[join-premap_no_premap--ForceBlocks] [GOOD] >> test.py::test[join-premap_no_premap--Results] >> test.py::test[pg-join_using_tables1-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q03-default.txt-ForceBlocks] >> test.py::test[window-win_func_aggr_4func--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort--ForceBlocks] >> test.py::test[blocks-date_less_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_less_scalar--Results] >> test.py::test[join-force_merge_join-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-force_merge_join-default.txt-Results] >> test.py::test[pg-tpcds-q08-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q08-default.txt-Results] >> test.py::test[window-win_func_over_group_by--Results] [GOOD] >> test.py::test[join-left_only_with_other--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star--ForceBlocks] >> test.py::test[blocks-combine_all_max--Results] [GOOD] >> test.py::test[pg-tpcds-q57-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q57-default.txt-Results] >> test.py::test[pg-insert--ForceBlocks] [GOOD] >> test.py::test[pg-insert--Results] |85.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[schema-select_with_map-sorted_desc-Results] [GOOD] >> test.py::test[aggregate-group_by_session_distinct_compact--Results] [GOOD] >> test.py::test[aggregate-group_by_session_only--ForceBlocks] >> test.py::test[insert_monotonic-from_empty--ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-from_empty--Results] >> test.py::test[weak_field-weak_field_join_where--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_join_where--Results] >> test.py::test[binding-table_range_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint64_opt2--ForceBlocks] >> test.py::test[insert-trivial_literals_multirow-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-trivial_literals_multirow-default.txt-Results] >> test.py::test[hor_join-max_in_tables--Results] [GOOD] >> test.py::test[blocks-combine_all_decimal--Results] [GOOD] >> test.py::test[blocks-combine_all_some_filter--ForceBlocks] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Results] [GOOD] >> test.py::test[table_range-range_slash--ForceBlocks] |85.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} 
ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[blocks-combine_all_max--Results] [GOOD] |85.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[window-win_func_over_group_by--Results] [GOOD] >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt-Results] >> test.py::test[optimizers-yql-6038_direct_row--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-6038_direct_row--Results] >> test.py::test[key_filter-yql_5895_or-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-yql_5895_or-default.txt-Results] >> test.py::test[join-nopushdown_filter_with_depends_on--Results] [GOOD] >> test.py::test[join-premap_map_semi--ForceBlocks] >> test.py::test[blocks-string_len_and_cmp--ForceBlocks] [GOOD] >> test.py::test[blocks-string_len_and_cmp--Results] >> test.py::test[sampling-yql-14664_deps-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-yql-14664_deps-default.txt-Results] >> test.py::test[select-calculated_values-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-calculated_values-default.txt-Results] >> test.py::test[pg-tpcds-q08-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q31-default.txt-ForceBlocks] >> test.py::test[pg-insert--Results] [GOOD] >> test.py::test[pg-name--ForceBlocks] >> test.py::test[aggr_factory-min-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--ForceBlocks] >> test.py::test[window-win_func_lead_lag_worm--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--Results] >> test.py::test[aggregate-group_by_rollup_key_check--Results] [GOOD] >> test.py::test[aggregate-group_by_session_nopush--Results] [SKIPPED] >> test.py::test[bigdate-table_arithmetic-default.txt-Results] >> test.py::test[optimizers-yql-5978_fill_multi_usage--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-5978_fill_multi_usage--Results] >> test.py::test[insert_monotonic-from_empty--Results] [GOOD] >> test.py::test[join-convert_key--ForceBlocks] >> test.py::test[pg-tpch-q22-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q22-default.txt-Results] >> test.py::test[insert-trivial_literals_multirow-default.txt-Results] [GOOD] >> test.py::test[insert-yql-13083--ForceBlocks] |85.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[hor_join-max_in_tables--Results] [GOOD] >> test.py::test[pg-tpcds-q57-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q62-default.txt-ForceBlocks] >> test.py::test[window-full/aggregations_compact--ForceBlocks] [GOOD] >> test.py::test[window-full/aggregations_compact--Results] >> test.py::test[join-force_merge_join-default.txt-Results] [GOOD] >> test.py::test[join-join_comp_common_table-off-ForceBlocks] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Results] >> test.py::test[select-trivial_having-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-trivial_having-default.txt-Results] >> test.py::test[key_filter-yql_5895_or-default.txt-Results] [GOOD] >> test.py::test[lineage-nested_lambda_fields-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-nested_lambda_fields-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_group_by_key-default.txt-ForceBlocks] [SKIPPED] >> 
test.py::test[lineage-select_group_by_key-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-extend-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[multicluster-extend-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-partition_by_key_force--ForceBlocks] [SKIPPED] >> test.py::test[multicluster-partition_by_key_force--Results] [SKIPPED] >> test.py::test[optimizers-pushdown_nonsep_over_aggregate--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-pushdown_nonsep_over_aggregate--Results] [SKIPPED] >> test.py::test[aggregate-group_by_session_only_distinct--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_only_distinct--Results] >> test.py::test[join-premap_merge_extrasort2--Results] [GOOD] >> test.py::test[join-star_join_multi-off-ForceBlocks] >> test.py::test[lambda-lambda_with_tie-default.txt-ForceBlocks] [GOOD] >> test.py::test[lambda-lambda_with_tie-default.txt-Results] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-Results] >> test.py::test[count-count_nullable--ForceBlocks] [GOOD] >> test.py::test[count-count_nullable--Results] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort--ForceBlocks] >> test.py::test[select-calculated_values-default.txt-Results] [GOOD] >> test.py::test[select-from_in_front_sub-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt-Results] [GOOD] >> test.py::test[aggregate-avg_and_sum_by_value--ForceBlocks] >> test.py::test[window-win_multiaggr_library--ForceBlocks] [GOOD] >> test.py::test[window-win_multiaggr_library--Results] >> test.py::test[sampling-yql-14664_deps-default.txt-Results] [GOOD] >> test.py::test[schema-fake_column-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_join_where--Results] [GOOD] >> test.py::test[window-current/ansi_current_with_win--ForceBlocks] >> test.py::test[pg-tpcds-q03-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q03-default.txt-Results] >> test.py::test[join-full_equal_not_null-off-ForceBlocks] [GOOD] >> test.py::test[join-full_equal_not_null-off-Results] [SKIPPED] >> test.py::test[blocks-string_len_and_cmp--Results] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-ForceBlocks] >> test.py::test[udf-udf--ForceBlocks] [GOOD] >> test.py::test[udf-udf--Results] >> test.py::test[epochs-reset_sortness_on_append--ForceBlocks] [GOOD] >> test.py::test[epochs-reset_sortness_on_append--Results] |85.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[optimizers-pushdown_nonsep_over_aggregate--Results] [SKIPPED] >> test.py::test[lambda-lambda_with_tie-default.txt-Results] [GOOD] >> test.py::test[lineage-select_field_filter-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_field_filter-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_many-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_many-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-yql-6038_direct_row--Results] [GOOD] >> test.py::test[order_by-assume_with_filter--ForceBlocks] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Results] [GOOD] >> test.py::test[select-trivial_having-default.txt-Results] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--ForceBlocks] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--Results] >> 
test.py::test[join-mapjoin_early_rewrite_star--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star--Results] >> test.py::test[blocks-add_uint64_opt2--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint64_opt2--Results] >> test.py::test[select-trivial_where-many-ForceBlocks] |85.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[join-full_equal_not_null-off-Results] [SKIPPED] >> test.py::test[pg-tpcds-q03-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q14-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_some_filter--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-ForceBlocks] [GOOD] >> test.py::test[count-count_nullable--Results] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_some_filter--Results] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-Results] >> test.py::test[aggr_factory-bitand-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-bitand-default.txt-Results] >> test.py::test[table_range-range_slash--ForceBlocks] [GOOD] >> test.py::test[table_range-range_slash--Results] >> test.py::test[aggregate-group_by_session_only--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_only--Results] >> test.py::test[udf-udf--Results] [GOOD] >> test.py::test[window-win_multiaggr_library--Results] [GOOD] >> test.py::test[join-premap_no_premap--Results] [GOOD] >> test.py::test[join-premap_no_premap-off-ForceBlocks] >> test.py::test[aggregate-group_by_session_only_distinct--Results] [GOOD] >> test.py::test[aggregate-percentiles_grouped--ForceBlocks] >> test.py::test[optimizers-yql-5978_fill_multi_usage--Results] [GOOD] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--ForceBlocks] |85.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[lineage-window_many-default.txt-Results] [SKIPPED] |85.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Results] [GOOD] >> test.py::test[epochs-reset_sortness_on_append--Results] [GOOD] >> test.py::test[expr-double_join_with_list_from_range--ForceBlocks] >> test.py::test[join-premap_map_semi--ForceBlocks] [GOOD] >> test.py::test[join-premap_map_semi--Results] >> test.py::test[pg-tpcds-q62-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q62-default.txt-Results] >> test.py::test[pg-tpcds-q31-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q31-default.txt-Results] >> test.py::test[blocks-add_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-combine_all_decimal_max-default.txt-ForceBlocks] >> test.py::test[blocks-date_less_scalar--Results] [GOOD] >> test.py::test[blocks-date_not_equals_scalar--ForceBlocks] |85.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[window-win_multiaggr_library--Results] [GOOD] >> test.py::test[window-full/aggregations_compact--Results] [GOOD] >> test.py::test[window-full/leadlag--ForceBlocks] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--Results] >> test.py::test[join-mapjoin_early_rewrite_star--Results] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct--ForceBlocks] >> 
test.py::test[hor_join-fuse_multi_outs2-outlimit-Results] [SKIPPED] >> test.py::test[window-win_func_aggr_4func_sort--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort--Results] >> test.py::test[pg-name--ForceBlocks] [GOOD] >> test.py::test[pg-name--Results] >> test.py::test[hor_join-less_outs--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-less_outs--Results] [SKIPPED] >> test.py::test[in-in_tablesource_on_raw_list--ForceBlocks] [SKIPPED] >> test.py::test[in-in_tablesource_on_raw_list--Results] [SKIPPED] >> test.py::test[in-in_tuple_table-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_some_filter--Results] [GOOD] >> test.py::test[join-convert_key--ForceBlocks] [GOOD] >> test.py::test[join-convert_key--Results] >> test.py::test[blocks-combine_all_sum--ForceBlocks] >> test.py::test[table_range-range_slash--Results] [GOOD] >> test.py::test[table_range-range_tables_with_view--ForceBlocks] >> test.py::test[pg-tpcds-q62-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q06-default.txt-ForceBlocks] |85.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[udf-udf--Results] [GOOD] >> test.py::test[aggregate-group_by_session_only--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Results] [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-Results] >> test.py::test[pg-tpcds-q31-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q66-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-Results] [GOOD] >> test.py::test[order_by-assume_over_input_desc--ForceBlocks] >> test.py::test[join-premap_map_semi--Results] [GOOD] >> test.py::test[join-pullup_context_dep--ForceBlocks] >> test.py::test[schema-fake_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-fake_column-default.txt-Results] >> test.py::test[lineage-list_literal1-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-list_literal1-default.txt-Results] [SKIPPED] >> test.py::test[lineage-pullup_rename--ForceBlocks] [SKIPPED] >> test.py::test[lineage-pullup_rename--Results] [SKIPPED] >> test.py::test[optimizers-simplified_path_constraint--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-simplified_path_constraint--Results] [SKIPPED] >> test.py::test[optimizers-yql-12620_stage_multiuse--ForceBlocks] >> test.py::test[window-leading/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-ForceBlocks] >> test.py::test[optimizers-yt_shuffle_by_keys--ForceBlocks] >> test.py::test[optimizers-yt_shuffle_by_keys--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yt_shuffle_by_keys--Results] [SKIPPED] >> test.py::test[order_by-SortByOneFieldDesc--ForceBlocks] >> test.py::test[select-from_in_front_sub-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-from_in_front_sub-default.txt-Results] >> test.py::test[pg-tpch-q22-default.txt-Results] [GOOD] >> test.py::test[produce-fuse_reduces_diff_sets--ForceBlocks] >> test.py::test[pg-name--Results] [GOOD] >> test.py::test[pg-select_alias_partial-default.txt-ForceBlocks] >> test.py::test[aggregate-avg_and_sum_by_value--ForceBlocks] [GOOD] >> test.py::test[aggregate-avg_and_sum_by_value--Results] >> test.py::test[case-case_size_eq_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-Results] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--Results] [GOOD] >> 
test.py::test[aggregate-disable_blocks_with_spilling--ForceBlocks] >> test.py::test[aggr_factory-bitand-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_filter_pushdown--ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--ForceBlocks] |85.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[aggregate-group_by_session_only--Results] [GOOD] |85.9%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part3/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[window-win_func_aggr_4func_sort--Results] [GOOD] >> test.py::test[window-win_func_spec_with_part--ForceBlocks] >> test.py::test[select-trivial_where-many-ForceBlocks] [GOOD] >> test.py::test[select-trivial_where-many-Results] >> test.py::test[join-convert_key--Results] [GOOD] >> test.py::test[join-convert_key-off-ForceBlocks] >> test.py::test[schema-fake_column-default.txt-Results] [GOOD] >> test.py::test[schema-select_all-read_schema-ForceBlocks] >> test.py::test[order_by-assume_with_filter--ForceBlocks] [GOOD] >> test.py::test[order_by-assume_with_filter--Results] >> test.py::test[csee-yql-7237--ForceBlocks] >> test.py::test[insert-yql-13083--ForceBlocks] [GOOD] >> test.py::test[insert-yql-13083--Results] >> test.py::test[distinct-distinct_one_count-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-Results] >> test.py::test[join-star_join_multi-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_multi-off-Results] [SKIPPED] >> test.py::test[join-two_aggrs-default.txt-ForceBlocks] >> test.py::test[case-case_size_eq_cast-default.txt-Results] [GOOD] >> test.py::test[case-case_val_then_else-default.txt-ForceBlocks] >> test.py::test[window-win_func_lead_lag_worm_with_part--Results] [GOOD] >> test.py::test[window-win_func_special--Results] >> test.py::test[schema-user_schema_patch_columns--ForceBlocks] >> test.py::test[select-trivial_where-many-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-ForceBlocks] >> test.py::test[select-from_in_front_sub-default.txt-Results] [GOOD] >> test.py::test[select-one_unlabeled_column-default.txt-ForceBlocks] >> test.py::test[aggregate-avg_and_sum_by_value--Results] [GOOD] >> test.py::test[blocks-combine_all_decimal_max-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Results] >> test.py::test[join-mergejoin_saves_output_sort--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort--Results] >> test.py::test[order_by-assume_with_filter--Results] [GOOD] >> test.py::test[order_by-order_by_tablerecord_column--ForceBlocks] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--Results] >> test.py::test[in-in_tuple_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_tuple_table-default.txt-Results] >> test.py::test[table_range-concat_with_view--Results] >> test.py::test[join-mapjoin_with_empty_struct--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct--Results] >> test.py::test[aggregate-percentiles_grouped--ForceBlocks] [GOOD] >> test.py::test[aggregate-percentiles_grouped--Results] >> test.py::test[table_range-range_tables_with_view--ForceBlocks] [GOOD] >> test.py::test[table_range-range_tables_with_view--Results] >> test.py::test[insert-yql-13083--Results] [GOOD] >> 
test.py::test[insert_monotonic-keep_meta-default.txt-ForceBlocks] >> test.py::test[join-premap_no_premap-off-ForceBlocks] [GOOD] >> test.py::test[order_by-SortByOneFieldDesc--ForceBlocks] [GOOD] >> test.py::test[order_by-SortByOneFieldDesc--Results] >> test.py::test[join-premap_no_premap-off-Results] [SKIPPED] >> test.py::test[join-pullup_random-off-ForceBlocks] >> test.py::test[distinct-distinct_one_count-default.txt-Results] [GOOD] >> test.py::test[dq-precompute_tree-default.txt-ForceBlocks] >> test.py::test[join-pullup_context_dep--ForceBlocks] [GOOD] >> test.py::test[pg-pg_column_case--Results] >> test.py::test[blocks-combine_all_sum--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_sum--Results] >> test.py::test[bigdate-table_arithmetic-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_common_type-default.txt-Results] >> test.py::test[pg-tpch-q06-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q06-default.txt-Results] |86.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[aggregate-avg_and_sum_by_value--Results] [GOOD] >> test.py::test[window-current/ansi_current_with_win--ForceBlocks] [GOOD] >> test.py::test[window-current/ansi_current_with_win--Results] >> test.py::test[expr-double_join_with_list_from_range--ForceBlocks] [GOOD] >> test.py::test[expr-double_join_with_list_from_range--Results] >> test.py::test[join-pullup_context_dep--Results] >> test.py::test[aggregate-disable_blocks_with_spilling--ForceBlocks] [GOOD] >> test.py::test[aggregate-disable_blocks_with_spilling--Results] [SKIPPED] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_min_filter--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--Results] >> test.py::test[pg-select_alias_partial-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_alias_partial-default.txt-Results] >> test.py::test[join-join_comp_common_table-off-ForceBlocks] [GOOD] >> test.py::test[join-join_comp_common_table-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_2o--ForceBlocks] |86.0%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part3/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[window-presort_window_order_by_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-Results] >> test.py::test[pg-tpcds-q66-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q66-default.txt-Results] >> test.py::test[produce-fuse_reduces_diff_sets--ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q14-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q14-default.txt-Results] >> test.py::test[order_by-assume_over_input_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-assume_over_input_desc--Results] >> test.py::test[produce-fuse_reduces_diff_sets--Results] >> test.py::test[in-in_tuple_table-default.txt-Results] [GOOD] >> test.py::test[in-yql-14677-default.txt-ForceBlocks] >> test.py::test[table_range-range_tables_with_view--Results] [GOOD] >> test.py::test[tpch-q15-default.txt-ForceBlocks] >> test.py::test[aggregate-list_after_group-default.txt-Results] [GOOD] >> test.py::test[window-full/leadlag--ForceBlocks] [GOOD] >> test.py::test[window-full/leadlag--Results] >> test.py::test[aggregate-group_by_ru_join_agg--Results] >> test.py::test[join-mapjoin_with_empty_struct--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross--ForceBlocks] >> test.py::test[schema-select_all-read_schema-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-read_schema-Results] >> test.py::test[order_by-SortByOneFieldDesc--Results] [GOOD] >> test.py::test[pg-join_using_tables4-default.txt-ForceBlocks] >> test.py::test[aggregate-agg_filter_pushdown--ForceBlocks] [GOOD] >> test.py::test[aggregate-agg_filter_pushdown--Results] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--Results] [GOOD] >> test.py::test[order_by-changed_sort_with_limit--ForceBlocks] [SKIPPED] >> test.py::test[order_by-changed_sort_with_limit--Results] [SKIPPED] >> test.py::test[pg-join_using_tables3-default.txt-ForceBlocks] >> test.py::test[join-convert_key-off-ForceBlocks] [GOOD] >> test.py::test[join-convert_key-off-Results] [SKIPPED] >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--Results] >> test.py::test[case-case_val_then_else-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_val_then_else-default.txt-Results] >> test.py::test[optimizers-yql-12620_stage_multiuse--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-12620_stage_multiuse--Results] >> test.py::test[blocks-combine_all_sum--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_keys--ForceBlocks] >> test.py::test[pg-tpch-q06-default.txt-Results] [GOOD] >> test.py::test[sampling-system_sampling-io_block_size-ForceBlocks] [SKIPPED] >> test.py::test[sampling-system_sampling-io_block_size-Results] [SKIPPED] >> test.py::test[sampling-table_content--ForceBlocks] >> test.py::test[order_by-assume_over_input_desc--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-Results] >> test.py::test[pg-select_alias_partial-default.txt-Results] [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q66-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q71-default.txt-ForceBlocks] >> 
test.py::test[window-presort_window_order_by_table-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_tablerecord_column--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tablerecord_column--Results] >> test.py::test[join-pullup_context_dep--Results] [GOOD] >> test.py::test[select-one_unlabeled_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-one_unlabeled_column-default.txt-Results] >> test.py::test[window-win_func_lead_lag_worm--ForceBlocks] >> test.py::test[schema-user_schema_patch_columns--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_patch_columns--Results] >> test.py::test[produce-fuse_reduces_diff_sets--Results] [GOOD] >> test.py::test[join-two_aggrs-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-two_aggrs-default.txt-Results] >> test.py::test[produce-process_rows_and_filter--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_rows_and_filter--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in-empty-ForceBlocks] >> test.py::test[schema-select_all-read_schema-Results] [GOOD] >> test.py::test[schema-select_all_inferschema2--ForceBlocks] >> test.py::test[join-simple_columns_partial-off-ForceBlocks] |86.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[aggregate-list_after_group-default.txt-Results] [GOOD] >> test.py::test[csee-yql-7237--ForceBlocks] [GOOD] >> test.py::test[action-dep_world_action_quote-default.txt-ForceBlocks] >> test.py::test[case-case_val_then_else-default.txt-Results] [GOOD] >> test.py::test[window-win_func_spec_with_part--ForceBlocks] [GOOD] >> test.py::test[column_group-groups-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-groups-perusage-Results] [SKIPPED] >> test.py::test[window-win_func_spec_with_part--Results] >> test.py::test[window-current/ansi_current_with_win--Results] [GOOD] >> test.py::test[window-generic/aggregations_mixed_leadlag--ForceBlocks] >> test.py::test[csee-yql-7237--Results] >> test.py::test[optimizers-yql-12620_stage_multiuse--Results] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--ForceBlocks] >> test.py::test[aggregate-percentiles_grouped--Results] [GOOD] >> test.py::test[bigdate-table_yt_native-wo_compat-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_native-wo_compat-Results] [SKIPPED] >> test.py::test[binding-table_filter_strict_binding-default.txt-ForceBlocks] |86.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[order_by-assume_over_input_desc--Results] [GOOD] |86.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[pg-tpch-q06-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_filter_pushdown--Results] [GOOD] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--ForceBlocks] >> test.py::test[expr-double_join_with_list_from_range--Results] [GOOD] >> test.py::test[expr-empty_iterator2--ForceBlocks] >> test.py::test[table_range-concat_with_view--Results] [GOOD] >> test.py::test[tpch-q16-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q14-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-ForceBlocks] >> test.py::test[window-win_func_special--Results] [GOOD] >> 
test.py::test[window-win_inline_spec-default.txt-Results] >> test.py::test[pg-pg_column_case--Results] [GOOD] >> test.py::test[pg-point-default.txt-ForceBlocks] >> test.py::test[aggr_factory-multi--ForceBlocks] [GOOD] >> test.py::test[aggr_factory-multi--Results] >> test.py::test[join-pullup_random-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_random-off-Results] [SKIPPED] >> test.py::test[join-right_trivial-off-ForceBlocks] >> test.py::test[select-one_unlabeled_column-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-ForceBlocks] |86.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[column_group-groups-perusage-Results] [SKIPPED] >> test.py::test[order_by-order_by_tablerecord_column--Results] [GOOD] >> test.py::test[schema-user_schema_patch_columns--Results] [GOOD] >> test.py::test[select-host_count--ForceBlocks] >> test.py::test[blocks-combine_all_min_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_min_filter--Results] >> test.py::test[insert_monotonic-keep_meta-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-keep_meta-default.txt-Results] >> test.py::test[join-lookupjoin_semi_2o--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_2o--Results] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Results] >> test.py::test[in-yql-14677-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-yql-14677-default.txt-Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--Results] [GOOD] >> test.py::test[join-mapjoin_on_tablerecord-off-ForceBlocks] >> test.py::test[window-full/leadlag--Results] [GOOD] >> test.py::test[window-full/session_compact--ForceBlocks] |86.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[order_by-order_by_tablerecord_column--Results] [GOOD] >> test.py::test[pg-join_using_tables4-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-join_using_tables4-default.txt-Results] >> test.py::test[insert_monotonic-keep_meta-default.txt-Results] [GOOD] >> test.py::test[join-bush_dis_in--ForceBlocks] >> test.py::test[join-two_aggrs-default.txt-Results] [GOOD] >> test.py::test[join-yql-8980-off-ForceBlocks] >> test.py::test[window-win_func_spec_with_part--Results] [GOOD] >> test.py::test[window-win_multiaggr_tuple-default.txt-ForceBlocks] >> test.py::test[dq-precompute_tree-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-precompute_tree-default.txt-Results] [SKIPPED] >> test.py::test[expr-non_persistable_inner_select_fail--ForceBlocks] >> test.py::test[pg-tpcds-q71-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q71-default.txt-Results] >> test.py::test[join-mergejoin_saves_output_sort_cross--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross--Results] >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-ForceBlocks] [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-Results] [SKIPPED] >> test.py::test[pg-join_using_tables3-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access--ForceBlocks] >> test.py::test[pg-join_using_tables3-default.txt-Results] >> test.py::test[blocks-combine_all_min_filter--Results] [GOOD] >> test.py::test[blocks-date_add_interval--ForceBlocks] >> 
test.py::test[join-lookupjoin_semi_2o--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--ForceBlocks] >> test.py::test[tpch-q15-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q15-default.txt-Results] >> test.py::test[pg-select_subquery2_qstar-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] >> test.py::test[in-yql-14677-default.txt-Results] [GOOD] >> test.py::test[insert-from_erasure_to_none--ForceBlocks] >> test.py::test[union_all-mix_map_and_project--ForceBlocks] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_compact--ForceBlocks] >> test.py::test[join-mergejoin_saves_output_sort--Results] [GOOD] >> test.py::test[join-mergejoin_sorts_output_for_sort_right--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_right--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--ForceBlocks] >> test.py::test[blocks-date_not_equals_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_not_equals_scalar--Results] >> test.py::test[sampling-table_content--ForceBlocks] [GOOD] >> test.py::test[sampling-table_content--Results] >> test.py::test[schema-select_all_inferschema2--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema2--Results] >> test.py::test[csee-yql-7237--Results] [GOOD] >> test.py::test[dq-blacklisted_pragmas--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--Results] [GOOD] >> test.py::test[aggregate-list_with_fold_map--ForceBlocks] >> test.py::test[pg-tpcds-q25-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-Results] >> test.py::test[pg-tpcds-q71-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q88-default.txt-ForceBlocks] >> test.py::test[pg-point-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-point-default.txt-Results] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--ForceBlocks] [GOOD] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Results] >> test.py::test[join-simple_columns_partial-off-ForceBlocks] [GOOD] >> test.py::test[join-simple_columns_partial-off-Results] [SKIPPED] >> test.py::test[join-split_to_list_as_key-off-ForceBlocks] >> test.py::test[binding-table_filter_strict_binding-default.txt-ForceBlocks] [GOOD] |86.0%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part5/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[binding-table_filter_strict_binding-default.txt-Results] >> test.py::test[expr-empty_iterator2--ForceBlocks] [GOOD] >> test.py::test[expr-non_persistable_inner_select_fail--ForceBlocks] [GOOD] >> test.py::test[expr-empty_iterator2--Results] >> test.py::test[expr-non_persistable_inner_select_fail--Results] [GOOD] >> test.py::test[flatten_by-flatten_dict--ForceBlocks] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Results] >> test.py::test[blocks-distinct_opt_state_keys--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_opt_state_keys--Results] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-Results] >> test.py::test[produce-reduce_multi_in-empty-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Results] >> test.py::test[join-right_trivial-off-ForceBlocks] [GOOD] >> test.py::test[join-right_trivial-off-Results] [SKIPPED] >> test.py::test[join-strict_keys--ForceBlocks] >> test.py::test[pg-join_using_tables4-default.txt-Results] [GOOD] >> test.py::test[pg-select_starref2-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-Results] >> test.py::test[window-win_func_lead_lag_worm--ForceBlocks] [GOOD] >> test.py::test[window-win_func_lead_lag_worm--Results] >> test.py::test[action-dep_world_action_quote-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-dep_world_action_quote-default.txt-Results] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Results] [GOOD] >> test.py::test[select-host_count--ForceBlocks] [GOOD] >> test.py::test[select-host_count--Results] >> test.py::test[pg-tpcds-q25-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q50-default.txt-ForceBlocks] >> test.py::test[pg-point-default.txt-Results] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_agg--Results] [GOOD] >> test.py::test[aggregate-group_by_with_udf_by_aggregate--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-group_by_with_udf_by_aggregate--Results] [SKIPPED] >> test.py::test[bigdate-table_io-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_saves_output_sort_cross--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested--ForceBlocks] >> test.py::test[tpch-q15-default.txt-Results] [GOOD] >> test.py::test[tpch-q19-default.txt-ForceBlocks] >> test.py::test[schema-select_all_inferschema2--Results] [GOOD] >> test.py::test[schema-select_reordered-default.txt-ForceBlocks] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-ForceBlocks] >> test.py::test[tpch-q16-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q16-default.txt-Results] >> test.py::test[binding-table_filter_strict_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_int64--ForceBlocks] >> test.py::test[sampling-table_content--Results] [GOOD] >> test.py::test[schema-select_all-schema-ForceBlocks] >> test.py::test[in-in_immediate_subquery-default.txt-Results] >> test.py::test[window-win_inline_spec-default.txt-Results] [GOOD] >> test.py::test[ypath-empty_range--Results] [SKIPPED] >> 
test.py::test[ypath-limit_with_key-default.txt-Results] >> test.py::test[expr-empty_iterator2--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-ForceBlocks] >> test.py::test[pg-join_using_tables3-default.txt-Results] [GOOD] >> test.py::test[pg-nulls_native-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_on_tablerecord-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_tablerecord-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-ForceBlocks] >> test.py::test[pg-wide_sort--Results] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_only_join--ForceBlocks] >> test.py::test[bigdate-table_common_type-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--ForceBlocks] |86.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Results] [GOOD] >> test.py::test[join-strict_keys--ForceBlocks] [GOOD] >> test.py::test[key_filter-contains_tuples-default.txt-Results] >> test.py::test[join-yql-8980-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-8980-off-Results] [SKIPPED] >> test.py::test[key_filter-lambda_with_null_filter--ForceBlocks] >> test.py::test[aggr_factory-max_by-default.txt-ForceBlocks] >> test.py::test[window-generic/aggregations_mixed_leadlag--ForceBlocks] [GOOD] >> test.py::test[window-generic/aggregations_mixed_leadlag--Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_fail--ForceBlocks] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Results] [GOOD] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-ForceBlocks] >> test.py::test[window-win_multiaggr_tuple-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--Results] |86.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[join-strict_keys--ForceBlocks] [GOOD] >> test.py::test[insert-from_erasure_to_none--ForceBlocks] [GOOD] >> test.py::test[insert-from_erasure_to_none--Results] >> test.py::test[action-dep_world_action_quote-default.txt-Results] [GOOD] >> test.py::test[action-eval_folder--ForceBlocks] >> test.py::test[select-host_count--Results] [GOOD] >> test.py::test[select-result_size_limit--ForceBlocks] [SKIPPED] >> test.py::test[select-result_size_limit--Results] >> test.py::test[union_all-mix_map_and_project--ForceBlocks] [GOOD] >> test.py::test[union_all-mix_map_and_project--Results] >> test.py::test[join-bush_dis_in--ForceBlocks] [GOOD] >> test.py::test[join-bush_dis_in--Results] >> test.py::test[select-result_size_limit--Results] [SKIPPED] >> test.py::test[select-scalar_subquery_with_star-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_hop_compact--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_compact--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only--ForceBlocks] >> test.py::test[dq-blacklisted_pragmas--ForceBlocks] [GOOD] >> test.py::test[window-win_func_lead_lag_worm--Results] [GOOD] >> 
test.py::test[window-win_func_special--ForceBlocks] >> test.py::test[dq-blacklisted_pragmas--Results] [SKIPPED] >> test.py::test[dq-join_cbo_native_3_tables--ForceBlocks] |86.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[bigdate-table_common_type-default.txt-Results] [GOOD] >> test.py::test[blocks-date_add_interval--ForceBlocks] [GOOD] >> test.py::test[blocks-date_add_interval--Results] >> test.py::test[pg-tpcds-q88-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q88-default.txt-Results] >> test.py::test[case-case_multi_val-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Results] >> test.py::test[join-join_without_correlation_and_dict_access--ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access--Results] >> test.py::test[join-split_to_list_as_key-off-ForceBlocks] [GOOD] >> test.py::test[join-split_to_list_as_key-off-Results] [SKIPPED] >> test.py::test[key_filter-convert--ForceBlocks] |86.0%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part5/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[blocks-date_not_equals_scalar--Results] [GOOD] >> test.py::test[blocks-decimal_op_decimal--ForceBlocks] >> test.py::test[aggregate-list_with_fold_map--ForceBlocks] [GOOD] >> test.py::test[aggregate-list_with_fold_map--Results] >> test.py::test[window-full/session_compact--ForceBlocks] [GOOD] >> test.py::test[window-full/session_compact--Results] >> test.py::test[pg-tpcds-q19-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-Results] >> test.py::test[pg-tpcds-q50-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q50-default.txt-Results] >> test.py::test[insert-from_erasure_to_none--Results] [GOOD] >> test.py::test[insert-multiappend_sorted-default.txt-ForceBlocks] >> test.py::test[pg-select_starref2-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_starref2-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_fail--ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_fail--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-ForceBlocks] >> test.py::test[union_all-mix_map_and_project--Results] [GOOD] >> test.py::test[view-secure_eval_dyn--ForceBlocks] >> test.py::test[blocks-distinct_opt_state_keys--Results] [GOOD] >> test.py::test[blocks-filter_by_column_with_drop--ForceBlocks] >> test.py::test[schema-select_all-schema-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-schema-Results] >> test.py::test[schema-select_reordered-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-select_reordered-default.txt-Results] >> test.py::test[tpch-q16-default.txt-Results] [GOOD] >> test.py::test[tpch-q2-default.txt-ForceBlocks] >> test.py::test[blocks-add_int64--ForceBlocks] [GOOD] >> test.py::test[blocks-add_int64--Results] >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] [GOOD] >> test.py::test[ypath-multi_key-default.txt-ForceBlocks] >> test.py::test[pg-wide_sort--Results] [GOOD] >> test.py::test[produce-process_multi_out_bad_count_fail--ForceBlocks] >> test.py::test[flatten_by-flatten_dict--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_dict--Results] >> test.py::test[pg-tpcds-q88-default.txt-Results] [GOOD] >> 
test.py::test[pg-tpcds-q97-default.txt-ForceBlocks] >> test.py::test[ypath-limit_with_key-default.txt-Results] [GOOD] >> test.py::test[ypath-multi_range-default.txt-Results] >> test.py::test[join-mergejoin_saves_output_sort_nested--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested--Results] >> test.py::test[pg-tpcds-q19-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q28-default.txt-ForceBlocks] >> test.py::test[tpch-q19-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q19-default.txt-Results] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Results] >> test.py::test[window-generic/aggregations_mixed_leadlag--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort_desc--ForceBlocks] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--Results] [GOOD] >> test.py::test[aggr_factory-multi--Results] [GOOD] >> test.py::test[aggr_factory-some-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q50-default.txt-Results] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-Results] >> test.py::test[pg-tpcds-q69-default.txt-ForceBlocks] >> test.py::test[schema-select_all-schema-Results] [GOOD] >> test.py::test[aggregate-list_with_fold_map--Results] [GOOD] >> test.py::test[blocks-add_decimal--ForceBlocks] >> test.py::test[schema-select_reordered-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_only_join--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_only_join--Results] >> test.py::test[schema-select_field-row_spec-ForceBlocks] >> test.py::test[in-in_immediate_subquery-default.txt-Results] [GOOD] >> test.py::test[insert-merge_publish--ForceBlocks] >> test.py::test[blocks-add_int64--Results] [GOOD] >> test.py::test[blocks-sort_two_desc--ForceBlocks] >> test.py::test[view-secure_eval_dyn--ForceBlocks] [GOOD] >> test.py::test[view-secure_eval_dyn--Results] [GOOD] >> test.py::test[weak_field-hor_join_with_mix_weak_access--ForceBlocks] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--Results] [GOOD] >> test.py::test[blocks-date_add_interval--Results] [GOOD] >> test.py::test[blocks-date_group_by--ForceBlocks] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_partial_uniq_keys--ForceBlocks] >> test.py::test[produce-process_multi_out_bad_count_fail--ForceBlocks] [GOOD] >> test.py::test[produce-process_multi_out_bad_count_fail--Results] >> test.py::test[pg-select_starref2-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q33-default.txt-ForceBlocks] >> test.py::test[window-full/session_compact--Results] [GOOD] >> test.py::test[window-generic/session_aliases--ForceBlocks] >> test.py::test[action-eval_folder--ForceBlocks] [GOOD] >> test.py::test[action-eval_folder--Results] >> test.py::test[select-scalar_subquery_with_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-scalar_subquery_with_star-default.txt-Results] >> test.py::test[produce-process_multi_out_bad_count_fail--Results] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-2.test] [GOOD] >> test.py::test[produce-reduce_with_python_filter_and_having--ForceBlocks] [SKIPPED] >> 
test.py::test[produce-reduce_with_python_filter_and_having--Results] [SKIPPED] >> test.py::test[ql_filter-integer_many_left--ForceBlocks] >> test.py::test[join-bush_dis_in--Results] [GOOD] >> test.py::test[join-bush_dis_in-off-ForceBlocks] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-3.test] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--ForceBlocks] >> test.py::test[bigdate-table_io-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_io-default.txt-Results] >> test.py::test[join-join_without_correlation_and_dict_access--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access-off-ForceBlocks] >> test.py::test[key_filter-lambda_with_null_filter--ForceBlocks] [GOOD] >> test.py::test[key_filter-lambda_with_null_filter--Results] >> test.py::test[aggregate-group_by_hop_only--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_only--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only_distinct--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Results] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[schema-select_reordered-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_only_distinct--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-ForceBlocks] >> test.py::test[tpch-q19-default.txt-Results] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--ForceBlocks] >> test.py::test[dq-join_cbo_native_3_tables--ForceBlocks] [GOOD] >> test.py::test[dq-join_cbo_native_3_tables--Results] [SKIPPED] >> test.py::test[dq-precompute_parallel--ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Results] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted-off-ForceBlocks] >> test.py::test[flatten_by-flatten_dict--Results] [GOOD] >> test.py::test[insert-append_missing_null-default.txt-ForceBlocks] >> test.py::test[pg-select_qstarref2-default.txt-Results] [GOOD] >> test.py::test[case-case_multi_val-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-table_func-default.txt-ForceBlocks] >> test.py::test[case-case_multi_val-default.txt-Results] >> test.py::test[key_filter-contains_tuples-default.txt-Results] [GOOD] >> test.py::test[key_filter-decimal--ForceBlocks] >> test.py::test[action-eval_folder--Results] [GOOD] >> test.py::test[action-export_action--ForceBlocks] >> test.py::test[pg-nulls_native-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-nulls_native-default.txt-Results] >> test.py::test[blocks-decimal_op_decimal--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_op_decimal--Results] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Results] >> test.py::test[window-win_func_special--ForceBlocks] [GOOD] >> test.py::test[window-win_func_special--Results] >> test.py::test[join-mergejoin_saves_output_sort_nested--Results] [GOOD] >> test.py::test[join-premap_common_multiparents-off-ForceBlocks] >> test.py::test[select-scalar_subquery_with_star-default.txt-Results] [GOOD] >> test.py::test[table_range-range_over_regexp--ForceBlocks] >> test.py::test[key_filter-convert--ForceBlocks] [GOOD] >> test.py::test[blocks-filter_by_column_with_drop--ForceBlocks] [GOOD] >> 
test.py::test[blocks-filter_by_column_with_drop--Results] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] >> test.py::test[join-mergejoin_force_align2-off-ForceBlocks] >> test.py::test[aggr_factory-max_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-max_by-default.txt-Results] >> test.py::test[aggregate-group_by_expr_only_join--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_few_empty--ForceBlocks] >> test.py::test[ypath-multi_range-default.txt-Results] [GOOD] >> test.py::test[ypath-multi_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[ypath-multi_key-default.txt-Results] >> test.py::test[case-case_multi_val-default.txt-Results] [GOOD] >> test.py::test[case-case_then_else-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q63-default.txt-Results] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[key_filter-convert--ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q97-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q97-default.txt-Results] >> test.py::test[bigdate-table_io-default.txt-Results] [GOOD] >> test.py::test[blocks-coalesce_complex-default.txt-ForceBlocks] >> test.py::test[blocks-decimal_op_decimal--Results] [GOOD] >> test.py::test[blocks-interval_div--ForceBlocks] >> test.py::test[schema-select_field-row_spec-ForceBlocks] [GOOD] >> test.py::test[schema-select_field-row_spec-Results] >> test.py::test[blocks-add_decimal--ForceBlocks] [GOOD] >> test.py::test[blocks-add_decimal--Results] >> test.py::test[blocks-filter_by_column_with_drop--Results] [GOOD] >> test.py::test[blocks-filter_direct_col--ForceBlocks] >> test.py::test[key_filter-lambda_with_null_filter--Results] [GOOD] >> test.py::test[key_filter-mixed_sort--ForceBlocks] >> test.py::test[tpch-q2-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q2-default.txt-Results] >> test.py::test[pg-nulls_native-default.txt-Results] [GOOD] >> test.py::test[pg-select_qstarref1-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q69-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q69-default.txt-Results] >> test.py::test[join-nested_semi_join-off-Results] [SKIPPED] >> test.py::test[join-order_of_qualified--ForceBlocks] >> test.py::test[pg-tpcds-q28-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q28-default.txt-Results] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[ypath-multi_range-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_dynum-default.txt-ForceBlocks] >> test.py::test[window-win_func_special--Results] [GOOD] >> test.py::test[ypath-empty_range-dynamic-ForceBlocks] [SKIPPED] >> test.py::test[ypath-empty_range-dynamic-Results] [SKIPPED] >> test.py::test[blocks-sort_two_desc--ForceBlocks] [GOOD] >> test.py::test[blocks-sort_two_desc--Results] >> test.py::test[ypath-multi_key-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Results] [GOOD] >> test.py::test[produce-reduce_with_flat_python_stream--ForceBlocks] [SKIPPED] >> test.py::test[weak_field-hor_join_with_mix_weak_access--ForceBlocks] [GOOD] >> test.py::test[weak_field-hor_join_with_mix_weak_access--Results] >> test.py::test[ql_filter-integer_many_left--ForceBlocks] 
[GOOD] >> test.py::test[ql_filter-integer_many_left--Results] >> test.py::test[produce-reduce_with_flat_python_stream--Results] [SKIPPED] >> test.py::test[sampling-bind_multiple_sample-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q33-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-some-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-some-default.txt-Results] >> test.py::test[pg-tpcds-q97-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q02-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q33-default.txt-Results] >> test.py::test[schema-select_field-row_spec-Results] [GOOD] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-ForceBlocks] >> test.py::test[type_v3-decimal_yt_llvm--ForceBlocks] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--Results] >> test.py::test[blocks-add_decimal--Results] [GOOD] >> test.py::test[blocks-block_input_various_types_2--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_various_types_2--Results] [SKIPPED] >> test.py::test[blocks-combine_all_count--ForceBlocks] >> test.py::test[window-win_func_aggr_4func_sort_desc--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort_desc--Results] >> test.py::test[join-mapjoin_partial_uniq_keys--ForceBlocks] [GOOD] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[ypath-empty_range-dynamic-Results] [SKIPPED] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[ypath-multi_key-default.txt-Results] [GOOD] >> test.py::test[blocks-date_group_by--ForceBlocks] [GOOD] >> test.py::test[blocks-date_group_by--Results] >> test.py::test[join-mapjoin_partial_uniq_keys--Results] >> test.py::test[aggr_factory-max_by-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-median-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q69-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q76-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted-off-Results] [SKIPPED] >> test.py::test[action-export_action--ForceBlocks] [GOOD] >> test.py::test[action-export_action--Results] >> test.py::test[blocks-sort_two_desc--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--Results] >> test.py::test[join-join_without_correlation_and_dict_access-off-ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access-off-Results] [SKIPPED] >> test.py::test[join-late_mergejoin_on_empty--ForceBlocks] [SKIPPED] >> test.py::test[join-late_mergejoin_on_empty--Results] [SKIPPED] >> test.py::test[join-bush_dis_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_dis_in-off-Results] [SKIPPED] >> test.py::test[join-bush_in_in-off-ForceBlocks] >> test.py::test[insert-append_missing_null-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-append_missing_null-default.txt-Results] >> test.py::test[pg-tpcds-q28-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q68-default.txt-ForceBlocks] >> test.py::test[ql_filter-integer_many_left--Results] [GOOD] >> 
test.py::test[ql_filter-integer_single_disable_prune--ForceBlocks] >> test.py::test[insert-merge_publish--ForceBlocks] [GOOD] >> test.py::test[insert-merge_publish--Results] >> test.py::test[table_range-range_over_regexp--ForceBlocks] [GOOD] >> test.py::test[table_range-range_over_regexp--Results] >> test.py::test[weak_field-hor_join_with_mix_weak_access--Results] [GOOD] >> test.py::test[window-distinct_over_window--ForceBlocks] >> test.py::test[pg-tpcds-q33-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q42-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q63-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-ForceBlocks] >> test.py::test[type_v3-decimal_yt_llvm--Results] [GOOD] >> test.py::test[type_v3-insert_struct_v3_wo_native--ForceBlocks] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-precompute_parallel--ForceBlocks] [GOOD] >> test.py::test[dq-precompute_parallel--Results] [SKIPPED] >> test.py::test[dq-wrong_script--ForceBlocks] [SKIPPED] >> test.py::test[dq-wrong_script--Results] [SKIPPED] >> test.py::test[flatten_by-flatten_list_on_flatten_by--ForceBlocks] >> test.py::test[hor_join-runtime_dep-default.txt-Results] >> test.py::test[join-premap_common_multiparents-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_multiparents-off-Results] [SKIPPED] >> test.py::test[join-premap_common_semi--ForceBlocks] >> test.py::test[case-case_then_else-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_then_else-default.txt-Results] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[join-mergejoin_force_one_sorted-off-Results] [SKIPPED] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[blocks-sort_two_desc--Results] [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys--Results] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique-off-ForceBlocks] >> test.py::test[aggr_factory-some-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-ForceBlocks] >> test.py::test[tpch-q2-default.txt-Results] [GOOD] >> test.py::test[type_v3-non_strict--ForceBlocks] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[join-late_mergejoin_on_empty--Results] [SKIPPED] >> test.py::test[insert-append_missing_null-default.txt-Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_desc-ForceBlocks] >> test.py::test[action-export_action--Results] [GOOD] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-ForceBlocks] >> test.py::test[table_range-range_over_regexp--Results] [GOOD] >> test.py::test[tpch-q6-default.txt-ForceBlocks] >> test.py::test[window-win_func_aggr_4func_sort_desc--Results] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--ForceBlocks] >> test.py::test[join-mergejoin_force_align2-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_align2-off-Results] [SKIPPED] >> test.py::test[join-nopushdown_filter_with_depends_on-off-ForceBlocks] >> test.py::test[blocks-filter_direct_col--ForceBlocks] [GOOD] >> test.py::test[blocks-filter_direct_col--Results] >> test.py::test[blocks-interval_div--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_div--Results] >> 
test.py::test[blocks-date_group_by--Results] [GOOD] >> test.py::test[blocks-date_sub_scalar--ForceBlocks] >> test.py::test[blocks-pg_top_sort--ForceBlocks] >> test.py::test[pg-select_qstarref1-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_qstarref1-default.txt-Results] >> test.py::test[key_filter-mixed_sort--ForceBlocks] [GOOD] >> test.py::test[key_filter-mixed_sort--Results] >> test.py::test[flatten_by-flatten_with_group_by--Results] [GOOD] >> test.py::test[hor_join-out_sampling--ForceBlocks] >> test.py::test[case-case_then_else-default.txt-Results] [GOOD] >> test.py::test[column_group-hint_dup_col_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_dup_col_fail--Results] [SKIPPED] >> test.py::test[column_group-insert_diff_groups3_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-insert_diff_groups3_fail--Results] [SKIPPED] >> test.py::test[column_order-ordered_plus_native--ForceBlocks] >> test.py::test[insert-merge_publish--Results] [GOOD] >> test.py::test[insert-two_input_tables--ForceBlocks] >> test.py::test[join-order_of_qualified--ForceBlocks] [GOOD] >> test.py::test[join-order_of_qualified--Results] >> test.py::test[order_by-order_by_dynum-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_dynum-default.txt-Results] >> test.py::test[aggregate-group_by_gs_few_empty--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_few_empty--Results] >> test.py::test[window-generic/session_aliases--ForceBlocks] [GOOD] >> test.py::test[window-generic/session_aliases--Results] >> test.py::test[blocks-coalesce_complex-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-coalesce_complex-default.txt-Results] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Results] >> test.py::test[pg-tpcds-q76-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q76-default.txt-Results] >> test.py::test[key_filter-decimal--ForceBlocks] [GOOD] >> test.py::test[key_filter-decimal--Results] >> test.py::test[blocks-filter_direct_col--Results] [GOOD] >> test.py::test[blocks-interval_mul_scalar--ForceBlocks] >> test.py::test[blocks-combine_all_count--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_count--Results] >> test.py::test[pg-tpcds-q42-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q42-default.txt-Results] >> test.py::test[pg-select_qstarref1-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_explicit_cast-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q68-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q68-default.txt-Results] >> test.py::test[sampling-bind_multiple_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_multiple_sample-default.txt-Results] >> test.py::test[pg-table_func-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-table_func-default.txt-Results] >> test.py::test[pg-select_table2-default.txt-ForceBlocks] >> test.py::test[ql_filter-integer_single_disable_prune--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_single_disable_prune--Results] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] >> test.py::test[blocks-interval_div--Results] [GOOD] >> test.py::test[blocks-interval_sub_interval_scalar--ForceBlocks] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Results] [GOOD] >> test.py::test[schema-user_schema_existing_column--ForceBlocks] 
>> test.py::test[pg-tpcds-q76-default.txt-Results] [GOOD] >> test.py::test[key_filter-mixed_sort--Results] [GOOD] >> test.py::test[key_filter-multiusage--ForceBlocks] >> test.py::test[order_by-order_by_dynum-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_udf--ForceBlocks] >> test.py::test[join-bush_in_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_in_in-off-Results] [SKIPPED] >> test.py::test[join-count_bans-off-ForceBlocks] >> test.py::test[select-unlabeled--Results] >> test.py::test[pg-tpcds-q42-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q73-default.txt-ForceBlocks] >> test.py::test[join-order_of_qualified--Results] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--ForceBlocks] >> test.py::test[flatten_by-flatten_list_on_flatten_by--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_list_on_flatten_by--Results] >> test.py::test[hor_join-runtime_dep-default.txt-Results] [GOOD] >> test.py::test[in-huge_in-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q68-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q82-default.txt-ForceBlocks] >> test.py::test[join-premap_common_semi--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_semi--Results] >> test.py::test[blocks-combine_all_count--Results] [GOOD] >> test.py::test[blocks-combine_all_sum_filter--ForceBlocks] >> test.py::test[insert-multiappend_sorted-default.txt-ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_single_disable_prune--Results] [GOOD] >> test.py::test[insert-multiappend_sorted-default.txt-Results] >> test.py::test[sampling-join_left_sample-default.txt-ForceBlocks] >> test.py::test[type_v3-non_strict--ForceBlocks] [GOOD] >> test.py::test[type_v3-non_strict--Results] >> test.py::test[blocks-coalesce_complex-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_count_filter_opt--ForceBlocks] >> test.py::test[join-mergejoin_big_primary_unique-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_choose_primary_with_retry--ForceBlocks] >> test.py::test[sampling-bind_multiple_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-join_right_sample-default.txt-ForceBlocks] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Results] >> test.py::test[blocks-pg_top_sort--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_top_sort--Results] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-ForceBlocks] >> test.py::test[join-nopushdown_filter_with_depends_on-off-ForceBlocks] [GOOD] >> test.py::test[join-nopushdown_filter_with_depends_on-off-Results] [SKIPPED] >> test.py::test[join-premap_map_semi-off-ForceBlocks] >> test.py::test[action-eval_each_input_table-default.txt-ForceBlocks] >> test.py::test[key_filter-decimal--Results] [GOOD] >> test.py::test[lineage-reduce_all-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-reduce_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_field_limit_offset-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_field_limit_offset-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_join-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_join-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_mix_fields-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_mix_fields-default.txt-Results] [SKIPPED] >> 
test.py::test[lineage-select_table_row-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_table_row-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-local_tc_with_force-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[multicluster-local_tc_with_force-default.txt-Results] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] [GOOD] >> test.py::test[table_range-tablepath_with_non_existing--ForceBlocks] >> test.py::test[tpch-q6-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q6-default.txt-Results] >> test.py::test[multicluster-local_tc_with_force-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-test_no_aggregate_split--ForceBlocks] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[pg-tpcds-q76-default.txt-Results] [GOOD] >> test.py::test[window-generic/session_aliases--Results] [GOOD] >> test.py::test[window-win_func_in_lib--ForceBlocks] >> test.py::test[flatten_by-flatten_list_on_flatten_by--Results] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--ForceBlocks] >> test.py::test[aggregate-group_by_gs_few_empty--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_simp--ForceBlocks] >> test.py::test[type_v3-insert_struct_v3_wo_native--ForceBlocks] [GOOD] >> test.py::test[aggr_factory-median-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-median-default.txt-Results] >> test.py::test[column_order-ordered_plus_native--ForceBlocks] [GOOD] >> test.py::test[column_order-ordered_plus_native--Results] >> test.py::test[insert-two_input_tables--ForceBlocks] [GOOD] >> test.py::test[insert-two_input_tables--Results] >> test.py::test[join-full_trivial--ForceBlocks] >> test.py::test[pg-tpcds-q67-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-Results] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--Results] >> test.py::test[type_v3-non_strict--Results] [GOOD] >> test.py::test[view-secure_eval--ForceBlocks] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_nested_subquery--ForceBlocks] >> test.py::test[pg-tpch-q02-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q02-default.txt-Results] >> test.py::test[insert-multiappend_sorted-default.txt-Results] [GOOD] >> test.py::test[insert_monotonic-truncate_fail--ForceBlocks] >> test.py::test[action-evaluate_pure--ForceBlocks] >> test.py::test[join-premap_common_semi--Results] [GOOD] >> test.py::test[join-premap_common_semi-off-ForceBlocks] >> test.py::test[join-mergejoin_force_align3--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_align3--Results] >> test.py::test[library-package--ForceBlocks] [SKIPPED] >> test.py::test[library-package--Results] [SKIPPED] >> test.py::test[library-package_override--ForceBlocks] [SKIPPED] >> test.py::test[library-package_override--Results] [SKIPPED] >> test.py::test[limit-yql-8046_empty_sorted_desc--ForceBlocks] [SKIPPED] >> test.py::test[limit-yql-8046_empty_sorted_desc--Results] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_force_align3--Results] [SKIPPED] >> 
test.py::test[join-mergejoin_semi_composite_to_inner--ForceBlocks] >> test.py::test[blocks-pg_top_sort--Results] [GOOD] >> test.py::test[blocks-string_as_agg_key--ForceBlocks] >> test.py::test[tpch-q6-default.txt-Results] [GOOD] >> test.py::test[type_v3-decimal_yt--ForceBlocks] >> test.py::test[pg-table_func-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q20-default.txt-ForceBlocks] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[type_v3-insert_struct_v3_wo_native--ForceBlocks] [GOOD] >> test.py::test[pg-select_table2-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_table2-default.txt-Results] >> test.py::test[schema-user_schema_existing_column--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_existing_column--Results] >> test.py::test[select-unlabeled--Results] [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-ForceBlocks] >> test.py::test[insert-two_input_tables--Results] [GOOD] >> test.py::test[insert_monotonic-several2-default.txt-ForceBlocks] >> test.py::test[hor_join-out_sampling--ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q73-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-out_sampling--Results] >> test.py::test[pg-tpcds-q73-default.txt-Results] >> test.py::test[column_order-ordered_plus_native--Results] [GOOD] >> test.py::test[datetime-date_tz_table_sort_asc--ForceBlocks] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-Results] >> test.py::test[blocks-interval_sub_interval_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_sub_interval_scalar--Results] >> test.py::test[window-distinct_over_window--ForceBlocks] [GOOD] >> test.py::test[window-distinct_over_window--Results] >> test.py::test[order_by-order_by_udf--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_udf--Results] >> test.py::test[pg-tpcds-q82-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q82-default.txt-Results] >> test.py::test[view-secure_eval--ForceBlocks] [GOOD] >> test.py::test[view-secure_eval--Results] [GOOD] >> test.py::test[view-trivial_view_concat--ForceBlocks] >> test.py::test[join-premap_common_inner_both_sides--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--Results] >> test.py::test[insert_monotonic-truncate_fail--ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-truncate_fail--Results] >> test.py::test[key_filter-multiusage--ForceBlocks] [GOOD] >> test.py::test[key_filter-multiusage--Results] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_existing_column--Results] [GOOD] >> test.py::test[schema-user_schema_mix1--ForceBlocks] |86.2%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part15/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[insert_monotonic-truncate_fail--Results] [GOOD] >> test.py::test[join-inner_with_order-off-ForceBlocks] >> test.py::test[blocks-combine_all_sum_filter--ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-Results] >> test.py::test[blocks-combine_all_sum_filter--Results] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--Results] [GOOD] >> test.py::test[window-win_multiaggr_list-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q67-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_choose_primary_with_retry--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry--Results] >> test.py::test[pg-tpcds-q73-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q77-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_count_filter_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_count_filter_opt--Results] >> test.py::test[pg-select_table2-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q45-default.txt-ForceBlocks] >> test.py::test[join-premap_map_semi-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_map_semi-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_extrasort2-off-ForceBlocks] >> test.py::test[pg-tpcds-q82-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q86-default.txt-ForceBlocks] >> test.py::test[sampling-join_left_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-join_left_sample-default.txt-Results] >> test.py::test[sampling-join_right_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-join_right_sample-default.txt-Results] >> test.py::test[order_by-order_by_udf--Results] [GOOD] >> test.py::test[pg-nulls-default.txt-ForceBlocks] >> test.py::test[table_range-tablepath_with_non_existing--ForceBlocks] [GOOD] >> test.py::test[table_range-tablepath_with_non_existing--Results] >> test.py::test[blocks-interval_sub_interval_scalar--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--ForceBlocks] >> test.py::test[join-count_bans-off-ForceBlocks] [GOOD] >> test.py::test[join-count_bans-off-Results] >> test.py::test[blocks-interval_mul_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_mul_scalar--Results] >> test.py::test[join-count_bans-off-Results] [SKIPPED] >> test.py::test[join-flatten_columns2--ForceBlocks] >> test.py::test[action-eval_each_input_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_each_input_table-default.txt-Results] >> test.py::test[aggr_factory-median-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt-ForceBlocks] >> test.py::test[bigdate-table_explicit_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_explicit_cast-default.txt-Results] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-Results] [GOOD] >> test.py::test[schema-user_schema_no_infer--ForceBlocks] >> test.py::test[insert-append_sorted-to_sorted_desc-ForceBlocks] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_desc-Results] >> test.py::test[action-subquery_merge_nested_subquery--ForceBlocks] [GOOD] >> test.py::test[action-subquery_merge_nested_subquery--Results] >> test.py::test[blocks-combine_all_sum_filter--Results] [GOOD] >> test.py::test[blocks-compare--ForceBlocks] >> test.py::test[flatten_by-flatten_member_is_struct--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--Results] >> 
test.py::test[join-full_trivial--ForceBlocks] [GOOD] >> test.py::test[join-full_trivial--Results] >> test.py::test[in-huge_in-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-huge_in-default.txt-Results] >> test.py::test[pg-tpcds-q20-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q20-default.txt-Results] >> test.py::test[blocks-combine_all_count_filter_opt--Results] [GOOD] >> test.py::test[coalesce-coalesce--ForceBlocks] >> test.py::test[join-premap_common_inner_both_sides--Results] [GOOD] >> test.py::test[optimizers-test_no_aggregate_split--ForceBlocks] [GOOD] >> test.py::test[optimizers-test_no_aggregate_split--Results] >> test.py::test[join-pullup_extra_columns--ForceBlocks] >> test.py::test[table_range-tablepath_with_non_existing--Results] [GOOD] >> test.py::test[tpch-q4-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_choose_primary_with_retry--Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-ForceBlocks] >> test.py::test[window-win_func_in_lib--ForceBlocks] [GOOD] >> test.py::test[window-win_func_in_lib--Results] >> test.py::test[join-premap_common_semi-off-ForceBlocks] [GOOD] >> test.py::test[type_v3-decimal_yt--ForceBlocks] [GOOD] >> test.py::test[type_v3-decimal_yt--Results] >> test.py::test[window-distinct_over_window--Results] [GOOD] >> test.py::test[window-full/noncompact_with_nulls--ForceBlocks] >> test.py::test[action-eval_each_input_table-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-avg-default.txt-ForceBlocks] >> test.py::test[join-premap_common_semi-off-Results] [SKIPPED] >> test.py::test[join-pullup_extend--ForceBlocks] >> test.py::test[key_filter-multiusage--Results] [GOOD] >> test.py::test[key_filter-no_bypass_merge--ForceBlocks] [SKIPPED] >> test.py::test[key_filter-no_bypass_merge--Results] [SKIPPED] >> test.py::test[key_filter-string_with_legacy--ForceBlocks] >> test.py::test[action-subquery_merge_nested_subquery--Results] [GOOD] >> test.py::test[aggr_factory-avg_if-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_gs_simp--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_simp--Results] >> test.py::test[blocks-string_as_agg_key--ForceBlocks] [GOOD] >> test.py::test[blocks-string_as_agg_key--Results] >> test.py::test[action-evaluate_pure--ForceBlocks] [GOOD] >> test.py::test[action-evaluate_pure--Results] >> test.py::test[schema-select_all_forceinferschema--ForceBlocks] [SKIPPED] >> test.py::test[schema-select_all_forceinferschema--Results] [SKIPPED] >> test.py::test[schema-user_schema_with_sort--ForceBlocks] >> test.py::test[join-mergejoin_semi_composite_to_inner--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_semi_composite_to_inner--Results] >> test.py::test[sampling-join_left_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-ForceBlocks] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-ForceBlocks] [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Results] >> test.py::test[hor_join-out_sampling--Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_desc-Results] [GOOD] >> test.py::test[insert-append_with_read_udf_fail--ForceBlocks] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Results] >> test.py::test[sampling-join_right_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-ForceBlocks] >> 
test.py::test[pg-tpcds-q20-default.txt-Results] [GOOD] >> test.py::test[pg-wide_top_sort--ForceBlocks] >> test.py::test[flatten_by-flatten_member_is_struct--Results] [GOOD] >> test.py::test[hor_join-double_input-default.txt-ForceBlocks] >> test.py::test[blocks-interval_mul_scalar--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_nested--ForceBlocks] >> test.py::test[datetime-date_tz_table_sort_asc--ForceBlocks] [GOOD] >> test.py::test[datetime-date_tz_table_sort_asc--Results] >> test.py::test[view-trivial_view_concat--ForceBlocks] [GOOD] >> test.py::test[view-trivial_view_concat--Results] |86.2%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part15/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[type_v3-decimal_yt--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-ForceBlocks] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[hor_join-out_sampling--Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregation_and_order-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q45-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q45-default.txt-Results] >> test.py::test[insert_monotonic-several2-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-several2-default.txt-Results] >> test.py::test[join-full_trivial--Results] [GOOD] >> test.py::test[join-full_trivial-off-ForceBlocks] >> test.py::test[join-inner_with_order-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_with_order-off-Results] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_nested_left--ForceBlocks] >> test.py::test[schema-user_schema_mix1--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_mix1--Results] >> test.py::test[window-win_func_in_lib--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--ForceBlocks] >> test.py::test[in-huge_in-default.txt-Results] [GOOD] >> test.py::test[in-in_scalar_vector_subquery-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q02-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q19-default.txt-ForceBlocks] >> test.py::test[aggregate-native_desc_group_compact_by--ForceBlocks] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q77-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q77-default.txt-Results] >> test.py::test[action-evaluate_pure--Results] [GOOD] >> test.py::test[action-parallel_for-default.txt-ForceBlocks] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Results] [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt-ForceBlocks] >> test.py::test[schema-user_schema_no_infer--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_no_infer--Results] >> test.py::test[join-premap_merge_extrasort2-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_extrasort2-off-Results] [SKIPPED] >> test.py::test[join-pullup_context_dep-off-ForceBlocks] >> test.py::test[bigdate-table_explicit_cast-default.txt-Results] [GOOD] >> test.py::test[bigdate-tz_table_pull--ForceBlocks] >> test.py::test[insert-append_with_read_udf_fail--ForceBlocks] [GOOD] >> test.py::test[window-win_multiaggr_list-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_multiaggr_list-default.txt-Results] >> test.py::test[optimizers-test_no_aggregate_split--Results] 
[GOOD] >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill--Results] [SKIPPED] >> test.py::test[pg-tpcds-q45-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt-ForceBlocks] >> test.py::test[join-flatten_columns2--ForceBlocks] [GOOD] >> test.py::test[join-flatten_columns2--Results] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt-Results] >> test.py::test[datetime-date_tz_table_sort_asc--Results] [GOOD] >> test.py::test[expr-inline_call--ForceBlocks] >> test.py::test[view-trivial_view_concat--Results] [GOOD] >> test.py::test[weak_field-weak_field_data--ForceBlocks] >> test.py::test[schema-user_schema_mix1--Results] [GOOD] >> test.py::test[select-corr_name_in_select_seq-default.txt-ForceBlocks] >> test.py::test[insert_monotonic-several2-default.txt-Results] [GOOD] >> test.py::test[join-compact_join--ForceBlocks] >> test.py::test[blocks-string_as_agg_key--Results] [GOOD] >> test.py::test[blocks-string_filter--ForceBlocks] >> test.py::test[coalesce-coalesce--ForceBlocks] [GOOD] >> test.py::test[coalesce-coalesce--Results] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill--Results] [SKIPPED] >> test.py::test[pg-tpcds-q77-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q08-default.txt-ForceBlocks] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--ForceBlocks] [GOOD] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--Results] >> test.py::test[blocks-compare--ForceBlocks] [GOOD] >> test.py::test[blocks-compare--Results] >> test.py::test[join-pullup_extra_columns--ForceBlocks] [GOOD] >> test.py::test[join-pullup_extra_columns--Results] >> test.py::test[schema-user_schema_no_infer--Results] [GOOD] >> test.py::test[select-literal_negative-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q70-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-Results] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[insert-append_with_read_udf_fail--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_simp--Results] [GOOD] >> test.py::test[pg-nulls-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-nulls-default.txt-Results] >> test.py::test[join-mergejoin_semi_composite_to_inner--Results] [GOOD] >> test.py::test[join-mergejoin_semi_composite_to_inner-off-ForceBlocks] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-Results] [SKIPPED] >> test.py::test[join-premap_common_left_cross-off-ForceBlocks] >> test.py::test[pg-tpcds-q86-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q86-default.txt-Results] >> test.py::test[join-pullup_extend--ForceBlocks] [GOOD] >> test.py::test[coalesce-coalesce--Results] [GOOD] >> test.py::test[column_group-hint_anon-disable-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_anon-disable-Results] [SKIPPED] >> test.py::test[column_group-publish-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-publish-perusage-Results] [SKIPPED] >> test.py::test[column_order-align_publish_native--ForceBlocks] >> test.py::test[tpch-q4-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[tpch-q4-default.txt-Results] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregation_by_udf--ForceBlocks] >> test.py::test[pg-wide_top_sort--ForceBlocks] [GOOD] >> test.py::test[pg-wide_top_sort--Results] >> test.py::test[window-win_multiaggr_list-default.txt-Results] [GOOD] >> test.py::test[ypath-direct_read_from_dynamic--ForceBlocks] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Results] >> test.py::test[key_filter-string_with_legacy--ForceBlocks] [GOOD] >> test.py::test[key_filter-string_with_legacy--Results] >> test.py::test[join-pullup_extra_columns--Results] [GOOD] >> test.py::test[join-pullup_null_column--ForceBlocks] >> test.py::test[schema-user_schema_with_sort--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_with_sort--Results] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[aggregate-group_by_gs_simp--Results] [GOOD] >> test.py::test[join-flatten_columns2--Results] [GOOD] >> test.py::test[join-grace_join1-grace-ForceBlocks] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-Results] >> test.py::test[hor_join-double_input-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-double_input-default.txt-Results] >> test.py::test[blocks-lazy_nonstrict_nested--ForceBlocks] [GOOD] >> test.py::test[blocks-lazy_nonstrict_nested--Results] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-Results] >> test.py::test[blocks-compare--Results] [GOOD] >> test.py::test[blocks-distinct_pure_keys--ForceBlocks] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[join-pullup_extend--ForceBlocks] [GOOD] >> test.py::test[aggr_factory-avg-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-avg-default.txt-Results] >> test.py::test[aggr_factory-avg_if-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-avg_if-default.txt-Results] >> test.py::test[aggregate-aggregation_and_order-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregation_and_order-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Results] >> test.py::test[pg-nulls-default.txt-Results] [GOOD] >> test.py::test[pg-pg_column_case--ForceBlocks] >> test.py::test[optimizers-test_fuse_map_take-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Results] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--Results] [GOOD] >> test.py::test[blocks-minmax_strings_filter--ForceBlocks] >> test.py::test[window-full/noncompact_with_nulls--ForceBlocks] [GOOD] >> test.py::test[window-full/noncompact_with_nulls--Results] >> test.py::test[pg-tpcds-q70-default.txt-Results] [GOOD] >> test.py::test[produce-process_streaming_inline_bash-default.txt-ForceBlocks] >> test.py::test[join-full_trivial-off-ForceBlocks] [GOOD] >> test.py::test[join-full_trivial-off-Results] [SKIPPED] >> test.py::test[join-group_compact_by--ForceBlocks] >> test.py::test[pg-wide_top_sort--Results] [GOOD] >> 
test.py::test[produce-process_with_udf_rows-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_data--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_data--Results] >> test.py::test[pg-tpcds-q81-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt-Results] >> test.py::test[join-pullup_context_dep-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_context_dep-off-Results] [SKIPPED] >> test.py::test[join-split_to_list_as_key--ForceBlocks] >> test.py::test[tpch-q4-default.txt-Results] [GOOD] >> test.py::test[type_v3-append_diff_layout1--ForceBlocks] >> test.py::test[bigdate-tz_table_pull--ForceBlocks] [GOOD] >> test.py::test[bigdate-tz_table_pull--Results] >> test.py::test[select-corr_name_in_select_seq-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-corr_name_in_select_seq-default.txt-Results] >> test.py::test[join-left_join_right_pushdown_nested_left--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_left--Results] >> test.py::test[pg-tpcds-q86-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-ForceBlocks] >> test.py::test[blocks-lazy_nonstrict_nested--Results] [GOOD] >> test.py::test[blocks-member--ForceBlocks] >> test.py::test[expr-inline_call--ForceBlocks] [GOOD] >> test.py::test[expr-inline_call--Results] >> test.py::test[hor_join-double_input-default.txt-Results] [GOOD] >> test.py::test[hor_join-fuse_multi_outs1--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs1--Results] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs2-outlimit-ForceBlocks] [SKIPPED] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-Results] [GOOD] >> test.py::test[udf-udf_call_with_group_and_limit--ForceBlocks] >> test.py::test[schema-user_schema_with_sort--Results] [GOOD] >> test.py::test[select-bit_ops-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q19-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-string_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-string_filter--Results] >> test.py::test[key_filter-string_with_legacy--Results] [GOOD] >> test.py::test[limit-zero_limit-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Results] [GOOD] >> test.py::test[tpch-q5-default.txt-ForceBlocks] >> test.py::test[action-parallel_for-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-parallel_for-default.txt-Results] >> test.py::test[select-literal_negative-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-literal_negative-default.txt-Results] >> test.py::test[join-compact_join--ForceBlocks] [GOOD] >> test.py::test[join-compact_join--Results] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-Results] [GOOD] >> test.py::test[schema-limit_directread--ForceBlocks] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_part-ForceBlocks] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[hor_join-fuse_multi_outs2-outlimit-ForceBlocks] [SKIPPED] >> test.py::test[pg-tpcds-q81-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q89-default.txt-ForceBlocks] >> test.py::test[udf-udaf_short--ForceBlocks] >> test.py::test[aggregate-aggregation_and_order-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--ForceBlocks] >> test.py::test[bigdate-tz_table_pull--Results] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt-ForceBlocks] >> 
test.py::test[aggregate-native_desc_group_compact_by--ForceBlocks] [GOOD] >> test.py::test[aggregate-native_desc_group_compact_by--Results] >> test.py::test[expr-inline_call--Results] [GOOD] >> test.py::test[expr-tagged_runtime-default.txt-ForceBlocks] >> test.py::test[aggr_factory-avg_if-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-booland-default.txt-ForceBlocks] >> test.py::test[select-corr_name_in_select_seq-default.txt-Results] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-ForceBlocks] >> test.py::test[join-premap_common_left_cross-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_left_cross-off-Results] [SKIPPED] >> test.py::test[join-premap_common_right_tablecontent-off-ForceBlocks] >> test.py::test[weak_field-weak_field_data--Results] [GOOD] >> test.py::test[window-current/session_incompat_sort--ForceBlocks] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--ForceBlocks] >> test.py::test[window-win_func_lead_lag_worm_with_part--ForceBlocks] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--Results] >> test.py::test[blocks-date_sub_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_sub_scalar--Results] >> test.py::test[join-mergejoin_semi_composite_to_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_semi_composite_to_inner-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_left--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_left--Results] [SKIPPED] >> test.py::test[join-premap_common_cross-off-ForceBlocks] >> test.py::test[ypath-direct_read_from_dynamic--ForceBlocks] [GOOD] >> test.py::test[ypath-direct_read_from_dynamic--Results] >> test.py::test[select-literal_negative-default.txt-Results] [GOOD] >> test.py::test[select-optional_pull--ForceBlocks] >> test.py::test[blocks-string_filter--Results] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregation_by_udf--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregation_by_udf--Results] >> test.py::test[aggr_factory-avg-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping--ForceBlocks] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[pg-tpch-q19-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-grace_join1-grace-ForceBlocks] [GOOD] >> test.py::test[join-grace_join1-grace-Results] [SKIPPED] >> test.py::test[join-pullup_null_column--ForceBlocks] [GOOD] >> test.py::test[join-pullup_null_column--Results] >> test.py::test[action-parallel_for-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_phases_table1-default.txt-ForceBlocks] >> test.py::test[column_order-align_publish_native--ForceBlocks] [GOOD] >> test.py::test[column_order-align_publish_native--Results] >> test.py::test[blocks-date_sub--ForceBlocks] >> test.py::test[window-full/noncompact_with_nulls--Results] [GOOD] >> test.py::test[window-leading/aggregations--ForceBlocks] >> test.py::test[like-regexp_clause--ForceBlocks] >> test.py::test[produce-process_with_udf_rows-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_udf_rows-default.txt-Results] >> test.py::test[pg-pg_column_case--ForceBlocks] [GOOD] >> test.py::test[join-compact_join--Results] [GOOD] >> test.py::test[join-inner_grouped--ForceBlocks] >> test.py::test[pg-tpch-q08-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[pg-tpch-q08-default.txt-Results] >> test.py::test[ypath-direct_read_from_dynamic--Results] [GOOD] >> test.py::test[type_v3-append_diff_layout1--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_left--Results] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-Results] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[join-grace_join1-grace-Results] [SKIPPED] >> test.py::test[join-group_compact_by--ForceBlocks] [GOOD] >> test.py::test[join-group_compact_by--Results] >> test.py::test[aggregate-aggregation_by_udf--Results] [GOOD] >> test.py::test[aggregate-avg_interval-default.txt-ForceBlocks] >> test.py::test[blocks-distinct_pure_keys--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_pure_keys--Results] >> test.py::test[produce-process_streaming_inline_bash-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_streaming_inline_bash-default.txt-Results] >> test.py::test[select-bit_ops-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-bit_ops-default.txt-Results] >> test.py::test[blocks-minmax_strings_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-minmax_strings_filter--Results] >> test.py::test[column_order-align_publish_native--Results] [GOOD] >> test.py::test[count-count_const_no_grouping-default.txt-ForceBlocks] >> test.py::test[join-split_to_list_as_key--ForceBlocks] [GOOD] >> test.py::test[join-split_to_list_as_key--Results] >> test.py::test[udf-udf_call_with_group_and_limit--ForceBlocks] [GOOD] >> test.py::test[udf-udf_call_with_group_and_limit--Results] >> test.py::test[pg-tpcds-q72-default.txt-ForceBlocks] >> test.py::test[limit-zero_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-zero_limit-default.txt-Results] >> test.py::test[schema-select_all-row_spec_part-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec_part-Results] >> test.py::test[blocks-member--ForceBlocks] [GOOD] >> test.py::test[blocks-member--Results] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[join-left_join_right_pushdown_nested_left--Results] [GOOD] >> test.py::test[pg-tpcds-q89-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-native_desc_group_compact_by--Results] [GOOD] >> test.py::test[join-pullup_null_column--Results] [GOOD] >> test.py::test[join-pullup_null_column-off-ForceBlocks] >> test.py::test[produce-process_with_udf_rows-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_presort--ForceBlocks] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[pg-pg_column_case--ForceBlocks] [GOOD] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[type_v3-append_diff_layout1--ForceBlocks] [GOOD] ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[ypath-direct_read_from_dynamic--Results] [GOOD] Test command err: 127.0.0.1 - - [05/May/2025 03:10:17] "GET /foo.txt HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 03:10:18] "GET /foo.txt HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 03:10:19] "GET /foo.txt HTTP/1.1" 200 - >> test.py::test[ansi_idents-basic_columns-default.txt-ForceBlocks] >> test.py::test[window-win_func_lead_lag_worm_with_part--Results] [GOOD] >> 
test.py::test[window-win_func_on_cloned_source-default.txt-ForceBlocks] >> test.py::test[in-in_scalar_vector_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_scalar_vector_subquery-default.txt-Results] >> test.py::test[schema-limit_directread--ForceBlocks] [GOOD] >> test.py::test[schema-limit_directread--Results] >> test.py::test[file-parse_file_in_select_as_uint64--ForceBlocks] >> test.py::test[binding-table_filter_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt-Results] >> test.py::test[pg-tpcds-q95-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q17-default.txt-ForceBlocks] >> test.py::test[udf-udaf_short--ForceBlocks] [GOOD] >> test.py::test[udf-udaf_short--Results] >> test.py::test[aggregate-group_by_gs_alt_duo--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--Results] >> test.py::test[expr-tagged_runtime-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-tagged_runtime-default.txt-Results] >> test.py::test[select-dot_name_subrequest-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-Results] >> test.py::test[select-bit_ops-default.txt-Results] [GOOD] >> test.py::test[select-dict_lookup_column_names-default.txt-ForceBlocks] >> test.py::test[schema-select_all-row_spec_part-Results] [GOOD] >> test.py::test[schema-user_schema_bind-default.txt-ForceBlocks] >> test.py::test[coalesce-coalesce_few_real-default.txt-ForceBlocks] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] >> test.py::test[produce-process_streaming_inline_bash-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_assume--ForceBlocks] >> test.py::test[blocks-member--Results] [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Results] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[pg-tpcds-q89-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-zero_limit-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_assume--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_assume--Results] [SKIPPED] >> test.py::test[sampling-bind_join_left-default.txt-ForceBlocks] >> test.py::test[tpch-q5-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q5-default.txt-Results] >> test.py::test[join-premap_common_right_tablecontent-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_right_tablecontent-off-Results] [SKIPPED] >> test.py::test[schema-limit_directread--Results] [GOOD] >> test.py::test[schema-remap_desc--ForceBlocks] >> test.py::test[join-premap_merge_extrasort2--ForceBlocks] >> test.py::test[join-premap_common_cross-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_cross-off-Results] [SKIPPED] >> test.py::test[join-premap_common_inner--ForceBlocks] >> test.py::test[aggr_factory-booland-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-booland-default.txt-Results] >> test.py::test[udf-udf_call_with_group_and_limit--Results] [GOOD] >> test.py::test[join-group_compact_by--Results] [GOOD] >> test.py::test[join-inner_all--ForceBlocks] >> test.py::test[binding-table_filter_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint32--ForceBlocks] >> test.py::test[blocks-minmax_strings_filter--Results] [GOOD] >> test.py::test[blocks-mod_uint64--ForceBlocks] >> test.py::test[pg-tpch-q08-default.txt-Results] [GOOD] >> 
test.py::test[pg-wide_sort--ForceBlocks] >> test.py::test[aggregate-agg_phases_table1-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-agg_phases_table1-default.txt-Results] >> test.py::test[join-split_to_list_as_key--Results] [GOOD] >> test.py::test[join-star_join-off-ForceBlocks] >> test.py::test[select-dot_name_subrequest-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_with_view--ForceBlocks] >> test.py::test[like-regexp_clause--ForceBlocks] [GOOD] >> test.py::test[like-regexp_clause--Results] >> test.py::test[select-optional_pull--ForceBlocks] [GOOD] >> test.py::test[select-optional_pull--Results] >> test.py::test[udf-udaf_short--Results] [GOOD] >> test.py::test[union_all-infer_3-default.txt-ForceBlocks] >> test.py::test[expr-tagged_runtime-default.txt-Results] [GOOD] >> test.py::test[hor_join-yield_off--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-yield_off--Results] [SKIPPED] >> test.py::test[in-in_immediate_subquery-default.txt-ForceBlocks] >> test.py::test[action-eval_anon_table--ForceBlocks] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[limit-zero_limit-default.txt-Results] [GOOD] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[udf-udf_call_with_group_and_limit--Results] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-ForceBlocks] >> test.py::test[blocks-distinct_pure_keys--Results] [GOOD] >> test.py::test[blocks-group_by_complex_key--ForceBlocks] >> test.py::test[aggregate-group_by_gs_alt_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--ForceBlocks] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[blocks-member--Results] [GOOD] >> test.py::test[join-inner_grouped--ForceBlocks] [GOOD] >> test.py::test[join-inner_grouped--Results] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Results] [GOOD] >> test.py::test[order_by-native_desc_sort-over_sorted-ForceBlocks] [SKIPPED] >> test.py::test[order_by-native_desc_sort-over_sorted-Results] [SKIPPED] >> test.py::test[pg-join_using_multiple2--ForceBlocks] >> test.py::test[blocks-date_sub_scalar--Results] [GOOD] >> test.py::test[blocks-decimal_comparison--ForceBlocks] >> test.py::test[aggregate-avg_interval-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-avg_interval-default.txt-Results] >> test.py::test[tpch-q5-default.txt-Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint--ForceBlocks] >> test.py::test[aggregate-agg_phases_table1-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--ForceBlocks] >> test.py::test[like-regexp_clause--Results] [GOOD] >> test.py::test[lineage-isolated-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-isolated-default.txt-Results] [SKIPPED] >> test.py::test[lineage-reduce_all_row-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-reduce_all_row-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-insert_fill--ForceBlocks] [SKIPPED] >> test.py::test[multicluster-insert_fill--Results] [SKIPPED] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-ForceBlocks] >> test.py::test[window-current/session_incompat_sort--ForceBlocks] [GOOD] >> test.py::test[window-current/session_incompat_sort--Results] >> 
test.py::test[insert-values_subquery--ForceBlocks] >> test.py::test[pg-tpcds-q72-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q72-default.txt-Results] >> test.py::test[insert-values_subquery--ForceBlocks] [SKIPPED] >> test.py::test[insert-values_subquery--Results] [SKIPPED] >> test.py::test[insert_monotonic-overlaping_fail--ForceBlocks] [SKIPPED] >> test.py::test[insert_monotonic-overlaping_fail--Results] [SKIPPED] >> test.py::test[join-cbo_4tables_only_sorted_merge--ForceBlocks] [SKIPPED] >> test.py::test[join-cbo_4tables_only_sorted_merge--Results] [SKIPPED] >> test.py::test[join-filter_joined--ForceBlocks] >> test.py::test[count-count_const_no_grouping-default.txt-ForceBlocks] [GOOD] >> test.py::test[count-count_const_no_grouping-default.txt-Results] >> test.py::test[file-parse_file_in_select_as_uint64--ForceBlocks] [GOOD] >> test.py::test[file-parse_file_in_select_as_uint64--Results] >> test.py::test[ansi_idents-basic_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[ansi_idents-basic_columns-default.txt-Results] >> test.py::test[select-optional_pull--Results] [GOOD] >> test.py::test[select-select_all_filtered-default.txt-ForceBlocks] >> test.py::test[aggr_factory-booland-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-ForceBlocks] >> test.py::test[join-pullup_null_column-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_null_column-off-Results] [SKIPPED] >> test.py::test[join-star_join_inners-off-ForceBlocks] >> test.py::test[window-win_func_on_cloned_source-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-Results] >> test.py::test[select-dict_lookup_column_names-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dict_lookup_column_names-default.txt-Results] >> test.py::test[schema-user_schema_bind-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_bind-default.txt-Results] >> test.py::test[pg-select_from_columns-default.txt-Results] >> test.py::test[count-count_const_no_grouping-default.txt-Results] [GOOD] >> test.py::test[blocks-mod_uint64--ForceBlocks] [GOOD] >> test.py::test[blocks-mod_uint64--Results] >> test.py::test[pg-tpcds-q72-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q94-default.txt-ForceBlocks] >> test.py::test[join-inner_all--ForceBlocks] [GOOD] >> test.py::test[join-inner_all--Results] >> test.py::test[in-in_scalar_vector_subquery-default.txt-Results] [GOOD] >> test.py::test[in-in_types_cast-default.txt-ForceBlocks] >> test.py::test[aggregate-avg_interval-default.txt-Results] [GOOD] >> test.py::test[aggregate-count_distinct_with_filter--ForceBlocks] >> test.py::test[union_all-infer_3-default.txt-ForceBlocks] [GOOD] >> test.py::test[file-parse_file_in_select_as_uint64--Results] [GOOD] >> test.py::test[flatten_by-flatten_columns-default.txt-ForceBlocks] >> test.py::test[join-inner_grouped--Results] [GOOD] >> test.py::test[join-join_cbo_3_tables--ForceBlocks] >> test.py::test[produce-process_with_assume--ForceBlocks] >> test.py::test[schema-remap_desc--ForceBlocks] [GOOD] >> test.py::test[schema-remap_desc--Results] >> test.py::test[produce-reduce_multi_in_presort--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_presort--Results] >> test.py::test[union_all-infer_3-default.txt-Results] >> test.py::test[join-premap_common_inner--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner--Results] >> test.py::test[blocks-add_uint32--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint32--Results] >> 
test.py::test[pg-wide_sort--ForceBlocks] [GOOD] >> test.py::test[ansi_idents-basic_columns-default.txt-Results] [GOOD] >> test.py::test[bigdate-tz_table_rw--ForceBlocks] >> test.py::test[sampling-bind_join_left-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_join_left-default.txt-Results] >> test.py::test[window-win_func_on_cloned_source-default.txt-Results] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-ForceBlocks] >> test.py::test[join-premap_merge_extrasort2--ForceBlocks] [GOOD] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[count-count_const_no_grouping-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_udf--ForceBlocks] >> test.py::test[select-dict_lookup_column_names-default.txt-Results] [GOOD] >> test.py::test[select-optional_in_job--ForceBlocks] >> test.py::test[schema-user_schema_bind-default.txt-Results] [GOOD] >> test.py::test[select-braces-default.txt-ForceBlocks] >> test.py::test[insert-trivial_select-default.txt-ForceBlocks] >> test.py::test[window-leading/aggregations--ForceBlocks] [GOOD] >> test.py::test[action-eval_anon_table--ForceBlocks] [GOOD] >> test.py::test[window-leading/aggregations--Results] >> test.py::test[action-eval_anon_table--Results] >> test.py::test[blocks-mod_uint64--Results] [GOOD] >> test.py::test[blocks-pg_to_numbers--ForceBlocks] >> test.py::test[table_range-concat_with_view--ForceBlocks] [GOOD] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[join-premap_merge_extrasort2--ForceBlocks] [GOOD] >> test.py::test[window-current/session_incompat_sort--Results] [GOOD] >> test.py::test[union_all-infer_3-default.txt-Results] [GOOD] >> test.py::test[view-file_eval--ForceBlocks] >> test.py::test[window-win_by_all_avg_interval-default.txt-ForceBlocks] >> test.py::test[type_v3-append_diff_layout2--Results] [SKIPPED] >> test.py::test[type_v3-replace_diff_layout--ForceBlocks] >> test.py::test[blocks-group_by_complex_key--ForceBlocks] [GOOD] >> test.py::test[blocks-group_by_complex_key--Results] >> test.py::test[in-in_immediate_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-date_sub--ForceBlocks] [GOOD] >> test.py::test[blocks-date_sub--Results] >> test.py::test[pg-tpch-q17-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint32--Results] [GOOD] >> test.py::test[blocks-add_uint8--ForceBlocks] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[pg-wide_sort--ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] >> test.py::test[type_v3-ignore_v3_hint--ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint--Results] >> test.py::test[blocks-finalize_hashed_keys--ForceBlocks] >> test.py::test[aggregate-group_by_gs_duo--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--Results] >> test.py::test[schema-remap_desc--Results] [GOOD] >> test.py::test[schema-select_all_inferschema_op--ForceBlocks] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[table_range-concat_with_view--ForceBlocks] [GOOD] >> test.py::test[pg-join_using_multiple2--ForceBlocks] [GOOD] >> test.py::test[pg-join_using_multiple2--Results] >> test.py::test[join-star_join-off-ForceBlocks] [GOOD] >> 
test.py::test[join-star_join-off-Results] [SKIPPED] >> test.py::test[join-yql-14847--ForceBlocks] >> test.py::test[join-filter_joined--ForceBlocks] [GOOD] >> test.py::test[join-filter_joined--Results] >> test.py::test[blocks-decimal_comparison--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_comparison--Results] >> test.py::test[select-select_all_filtered-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_all_filtered-default.txt-Results] >> test.py::test[join-premap_common_inner--Results] [GOOD] >> test.py::test[join-premap_common_inner_filter--ForceBlocks] >> test.py::test[action-eval_anon_table--Results] [GOOD] >> test.py::test[action-eval_folder_via_file--ForceBlocks] >> test.py::test[join-inner_all--Results] [GOOD] >> test.py::test[join-join_comp_common_table--ForceBlocks] >> test.py::test[produce-reduce_multi_in_presort--Results] [GOOD] >> test.py::test[produce-reduce_with_python_few_keys--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys--Results] [SKIPPED] >> test.py::test[sampling-bind_small_rate-default.txt-ForceBlocks] >> test.py::test[pg-select_from_columns-default.txt-Results] [GOOD] >> test.py::test[pg-select_limit-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--Results] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[in-in_immediate_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_join_left-default.txt-Results] [GOOD] >> test.py::test[sampling-read-dynamic-ForceBlocks] >> test.py::test[view-file_eval--ForceBlocks] [GOOD] >> test.py::test[view-file_eval--Results] [GOOD] >> test.py::test[view-view_with_lambda_process--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-Results] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[pg-tpch-q17-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_pg_filter--ForceBlocks] >> test.py::test[type_v3-ignore_v3_hint--Results] [GOOD] >> test.py::test[view-file_outer--ForceBlocks] >> test.py::test[optimizers-field_subset_for_multiusage--ForceBlocks] >> test.py::test[aggregate-aggrs_no_grouping--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping--Results] >> test.py::test[pg-tpcds-q94-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q94-default.txt-Results] >> test.py::test[select-select_all_filtered-default.txt-Results] [GOOD] >> test.py::test[select-select_all_group_by_column--ForceBlocks] >> test.py::test[blocks-group_by_complex_key--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_basic--ForceBlocks] >> test.py::test[blocks-decimal_comparison--Results] [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-Results] >> test.py::test[window-leading/aggregations--Results] [GOOD] >> test.py::test[window-rank/opt--ForceBlocks] >> test.py::test[in-in_types_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_types_cast-default.txt-Results] >> test.py::test[aggregate-count_distinct_with_filter--ForceBlocks] [GOOD] >> test.py::test[aggregate-count_distinct_with_filter--Results] >> test.py::test[flatten_by-flatten_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_columns-default.txt-Results] >> 
test.py::test[produce-process_with_assume--ForceBlocks] [GOOD] >> test.py::test[produce-process_with_assume--Results] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--Results] [GOOD] >> test.py::test[aggregate-aggregate_udf_nested--ForceBlocks] >> test.py::test[aggregate-group_by_gs_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-ForceBlocks] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-ForceBlocks] >> test.py::test[join-filter_joined--Results] [GOOD] >> test.py::test[join-inner_with_select-off-ForceBlocks] >> test.py::test[join-join_cbo_3_tables--ForceBlocks] [GOOD] >> test.py::test[join-join_cbo_3_tables--Results] >> test.py::test[join-star_join_inners-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners-off-Results] [SKIPPED] >> test.py::test[join-yql-14829_leftonly--ForceBlocks] >> test.py::test[select-braces-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-braces-default.txt-Results] >> test.py::test[bigdate-tz_table_rw--ForceBlocks] [GOOD] >> test.py::test[bigdate-tz_table_rw--Results] >> test.py::test[pg-tpcds-q94-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q01-default.txt-ForceBlocks] |86.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[blocks-decimal_comparison--Results] [GOOD] >> test.py::test[type_v3-replace_diff_layout--ForceBlocks] [GOOD] >> test.py::test[type_v3-replace_diff_layout--Results] >> test.py::test[view-file_outer--ForceBlocks] [GOOD] >> test.py::test[view-file_outer--Results] [GOOD] >> test.py::test[view-file_outer_library--ForceBlocks] >> test.py::test[insert-trivial_select-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-trivial_select-default.txt-Results] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-Results] >> test.py::test[schema-select_all_inferschema_op--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema_op--Results] >> test.py::test[blocks-pg_to_numbers--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_to_numbers--Results] >> test.py::test[blocks-add_uint8--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint8--Results] >> test.py::test[flatten_by-flatten_columns-default.txt-Results] [GOOD] >> test.py::test[flatten_by-struct_without_correlation-default.txt-ForceBlocks] >> test.py::test[pg-join_using_multiple2--Results] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-ForceBlocks] >> test.py::test[in-in_types_cast-default.txt-Results] [GOOD] >> test.py::test[in-in_types_cast_all-default.txt-ForceBlocks] >> test.py::test[produce-process_with_assume--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype--ForceBlocks] >> test.py::test[select-optional_in_job--ForceBlocks] [GOOD] >> test.py::test[select-optional_in_job--Results] >> test.py::test[blocks-finalize_hashed_keys--ForceBlocks] [GOOD] >> test.py::test[blocks-finalize_hashed_keys--Results] >> test.py::test[aggregate-count_distinct_with_filter--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-ForceBlocks] >> test.py::test[select-braces-default.txt-Results] [GOOD] |86.4%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part11/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[type_v3-replace_diff_layout--Results] [GOOD] >> test.py::test[bigdate-tz_table_rw--Results] [GOOD] >> test.py::test[udf-python_script_from_file--ForceBlocks] [SKIPPED] >> test.py::test[binding-insert_binding--ForceBlocks] >> test.py::test[udf-python_script_from_file--Results] [SKIPPED] >> test.py::test[udf-python_struct--ForceBlocks] [SKIPPED] >> test.py::test[udf-python_struct--Results] [SKIPPED] >> test.py::test[union-union_multiin--ForceBlocks] >> test.py::test[blocks-date_sub--Results] [GOOD] >> test.py::test[blocks-filter_expr--ForceBlocks] >> test.py::test[pg-select_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-trivial_select-default.txt-Results] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys-off-ForceBlocks] >> test.py::test[pg-select_limit-default.txt-Results] >> test.py::test[aggr_factory-linear_histogram-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-ForceBlocks] >> test.py::test[join-premap_common_inner_filter--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner_filter--Results] >> test.py::test[window-win_by_all_avg_interval-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt-Results] >> test.py::test[action-eval_folder_via_file--ForceBlocks] [GOOD] >> test.py::test[action-eval_folder_via_file--Results] >> test.py::test[join-yql-14847--ForceBlocks] [GOOD] >> test.py::test[join-yql-14847--Results] >> test.py::test[join-lookupjoin_semi_2o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_subq-off-ForceBlocks] >> test.py::test[sampling-bind_small_rate-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_small_rate-default.txt-Results] >> test.py::test[blocks-add_uint8--Results] [GOOD] >> test.py::test[blocks-coalesce_bools--ForceBlocks] >> test.py::test[sampling-read-dynamic-ForceBlocks] [GOOD] >> test.py::test[sampling-read-dynamic-Results] >> test.py::test[view-file_outer_library--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema_op--Results] [GOOD] >> test.py::test[view-file_outer_library--Results] [GOOD] >> test.py::test[window-full/session_incompat_sort--ForceBlocks] >> test.py::test[view-view_with_lambda_process--ForceBlocks] [GOOD] >> test.py::test[view-view_with_lambda_process--Results] >> test.py::test[blocks-pg_to_numbers--Results] [GOOD] >> test.py::test[case-case_val_when_then-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_rollup_udf--ForceBlocks] [GOOD] >> test.py::test[select-select_all_group_by_column--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_udf--Results] >> test.py::test[select-select_all_group_by_column--Results] |86.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[select-braces-default.txt-Results] [GOOD] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-ForceBlocks] >> test.py::test[select-optional_in_job--Results] [GOOD] >> test.py::test[select-where_cast-default.txt-ForceBlocks] >> test.py::test[schema-read_schema_change_other--ForceBlocks] >> test.py::test[action-discard-default.txt-ForceBlocks] >> test.py::test[window-win_fuse_window-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-Results] [GOOD] >> 
test.py::test[table_range-each_with_non_existing_all_fail--ForceBlocks] >> test.py::test[pg-select_limit-default.txt-Results] [GOOD] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q01-default.txt-ForceBlocks] >> test.py::test[blocks-finalize_hashed_keys--Results] [GOOD] >> test.py::test[blocks-pg_call--ForceBlocks] |86.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[schema-select_all_inferschema_op--Results] [GOOD] >> test.py::test[action-eval_folder_via_file--Results] [GOOD] >> test.py::test[action-eval_regexp--ForceBlocks] >> test.py::test[sampling-bind_small_rate-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_udf_nested--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_udf_nested--Results] >> test.py::test[sampling-read-dynamic-Results] [GOOD] >> test.py::test[sampling-take_with_sampling-default.txt-ForceBlocks] >> test.py::test[view-view_with_lambda_process--Results] [GOOD] >> test.py::test[window-current/ansi_current--ForceBlocks] >> test.py::test[optimizers-field_subset_for_multiusage--ForceBlocks] [GOOD] >> test.py::test[optimizers-field_subset_for_multiusage--Results] >> test.py::test[blocks-combine_all_pg_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_pg_filter--Results] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-join_cbo_3_tables--Results] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] >> test.py::test[join-lookupjoin_take_skip--ForceBlocks] [SKIPPED] >> test.py::test[join-lookupjoin_take_skip--Results] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-ForceBlocks] >> test.py::test[join-inner_with_select-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_with_select-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner-off-ForceBlocks] >> test.py::test[blocks-lazy_nonstrict_basic--ForceBlocks] [GOOD] >> test.py::test[blocks-lazy_nonstrict_basic--Results] >> test.py::test[join-premap_common_inner_filter--Results] [GOOD] >> test.py::test[join-pullup_left_semi--ForceBlocks] >> test.py::test[file-parse_file_in_select_as_int--ForceBlocks] >> test.py::test[select-select_all_group_by_column--Results] [GOOD] >> test.py::test[select-shift_columns-default.txt-ForceBlocks] >> test.py::test[window-win_by_all_avg_interval-default.txt-Results] [GOOD] >> test.py::test[window-win_func_first_last--ForceBlocks] >> test.py::test[pg-tpcds-q09-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Results] >> test.py::test[join-yql-14847--Results] [GOOD] >> test.py::test[join-yql-8131--ForceBlocks] [SKIPPED] >> test.py::test[join-yql-8131--Results] [SKIPPED] >> test.py::test[key_filter-contains_tuples-default.txt-ForceBlocks] >> test.py::test[schema-read_schema_change_other--ForceBlocks] [GOOD] >> test.py::test[schema-read_schema_change_other--Results] [GOOD] >> test.py::test[schema-select_all-yamred_dsv-ForceBlocks] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-yql-14829_leftonly--ForceBlocks] [GOOD] >> test.py::test[join-yql-14829_leftonly--Results] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-Results] >> test.py::test[table_range-each_with_non_existing_all_fail--ForceBlocks] [GOOD] >> test.py::test[table_range-each_with_non_existing_all_fail--Results] [GOOD] >> 
test.py::test[type_v3-ignore_v3_hint-protofield-ForceBlocks] |86.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[sampling-bind_small_rate-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_force_align3-off-ForceBlocks] >> test.py::test[aggregate-aggrs_no_grouping--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt-ForceBlocks] >> test.py::test[in-in_types_cast_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_types_cast_all-default.txt-Results] >> test.py::test[flatten_by-struct_without_correlation-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-struct_without_correlation-default.txt-Results] >> test.py::test[union-union_multiin--ForceBlocks] [GOOD] >> test.py::test[union-union_multiin--Results] >> test.py::test[blocks-filter_expr--ForceBlocks] [GOOD] >> test.py::test[blocks-filter_expr--Results] |86.4%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part11/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[window-rank/opt--ForceBlocks] [GOOD] >> test.py::test[window-rank/opt--Results] >> test.py::test[aggregate-aggregate_udf_nested--Results] [GOOD] >> test.py::test[aggregate-avg_and_sum-default.txt-ForceBlocks] >> test.py::test[binding-insert_binding--ForceBlocks] [GOOD] >> test.py::test[binding-insert_binding--Results] >> test.py::test[pg-tpcds-q09-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q63-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Results] >> test.py::test[pg-tpch-q01-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q01-default.txt-Results] >> test.py::test[aggregate-group_by_rollup_udf--Results] [GOOD] >> test.py::test[aggregate-table_row_aggregation-default.txt-ForceBlocks] >> test.py::test[join-join_comp_common_table--ForceBlocks] [GOOD] >> test.py::test[join-join_comp_common_table--Results] >> test.py::test[join-lookupjoin_semi_subq-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_subq-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_with_anonymous-off-ForceBlocks] >> test.py::test[blocks-lazy_nonstrict_basic--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype--Results] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-Results] >> test.py::test[select-where_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-where_cast-default.txt-Results] >> test.py::test[optimizers-yql-7767_key_filter_with_view--ForceBlocks] >> test.py::test[window-win_fuse_window-default.txt-Results] [GOOD] >> test.py::test[window-win_over_few_partitions_other--ForceBlocks] >> test.py::test[in-in_types_cast_all-default.txt-Results] [GOOD] >> test.py::test[insert-append--ForceBlocks] >> test.py::test[blocks-coalesce_bools--ForceBlocks] [GOOD] >> test.py::test[blocks-coalesce_bools--Results] >> test.py::test[optimizers-field_subset_for_multiusage--Results] [GOOD] >> test.py::test[optimizers-fuse_map_mapreduce--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-fuse_map_mapreduce--Results] [SKIPPED] >> test.py::test[case-case_val_when_then-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_val_when_then-default.txt-Results] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] 
[GOOD] >> test.py::test[dq-precompute_parallel_mix--ForceBlocks] >> test.py::test[optimizers-group_visit_lambdas--ForceBlocks] >> test.py::test[pg-tpcds-q01-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q01-default.txt-Results] >> test.py::test[blocks-combine_all_pg_filter--Results] [GOOD] >> test.py::test[blocks-date_equals--ForceBlocks] >> test.py::test[blocks-filter_expr--Results] [GOOD] >> test.py::test[blocks-top_sort_one_asc--ForceBlocks] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-Results] >> test.py::test[join-anyjoin_common_nodata_keys-off-ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys-off-Results] [SKIPPED] >> test.py::test[join-equi_join_three_simple--ForceBlocks] >> test.py::test[binding-insert_binding--Results] [GOOD] >> test.py::test[binding-table_concat_binding-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] >> test.py::test[flatten_by-struct_without_correlation-default.txt-Results] [GOOD] >> test.py::test[hor_join-group_sampling--ForceBlocks] >> test.py::test[blocks-pg_call--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_call--Results] |86.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[blocks-lazy_nonstrict_basic--Results] [GOOD] >> test.py::test[union-union_multiin--Results] [GOOD] >> test.py::test[weak_field-weak_field_real_col-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple--ForceBlocks] >> test.py::test[select-where_cast-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_base-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg--ForceBlocks] >> test.py::test[action-eval_regexp--ForceBlocks] [GOOD] >> test.py::test[action-eval_regexp--Results] >> test.py::test[action-discard-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-discard-default.txt-Results] >> test.py::test[file-parse_file_in_select_as_int--ForceBlocks] [GOOD] >> test.py::test[file-parse_file_in_select_as_int--Results] >> test.py::test[sampling-take_with_sampling-default.txt-ForceBlocks] [GOOD] |86.4%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part18/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[sampling-take_with_sampling-default.txt-Results] >> test.py::test[join-yql-14829_leftonly--Results] [GOOD] >> test.py::test[join-yql-8125--ForceBlocks] >> test.py::test[case-case_val_when_then-default.txt-Results] [GOOD] >> test.py::test[join-lookupjoin_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o2o-off-ForceBlocks] >> test.py::test[schema-select_all-yamred_dsv-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-yamred_dsv-Results] >> test.py::test[pg-tpcds-q01-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q27-default.txt-ForceBlocks] >> test.py::test[join-pullup_left_semi--ForceBlocks] [GOOD] >> test.py::test[join-pullup_left_semi--Results] >> test.py::test[blocks-coalesce_bools--Results] [GOOD] >> test.py::test[blocks-combine_all_minmax_double--ForceBlocks] >> test.py::test[select-shift_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-shift_columns-default.txt-Results] >> test.py::test[join-anyjoin_common_nodup--ForceBlocks] >> test.py::test[produce-reduce_multi_in_difftype--Results] [GOOD] >> test.py::test[schema-append_to_desc_with_remap--ForceBlocks] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] [GOOD] >> test.py::test[order_by-literal_take_zero_sort--ForceBlocks] >> test.py::test[blocks-pg_call--Results] [GOOD] >> test.py::test[window-win_func_first_last--ForceBlocks] [GOOD] >> test.py::test[window-win_func_first_last--Results] >> test.py::test[type_v3-ignore_v3_hint-protofield-ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-protofield-Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_tablerecord--ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_on_tablerecord--Results] [SKIPPED] >> test.py::test[blocks-pg_to_interval--ForceBlocks] >> test.py::test[pg-tpcds-q63-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-Results] [GOOD] >> test.py::test[limit-insert_with_limit--ForceBlocks] >> test.py::test[action-eval_regexp--Results] [GOOD] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-ForceBlocks] |86.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[case-case_val_when_then-default.txt-Results] [GOOD] >> test.py::test[schema-select_all-yamred_dsv-Results] [GOOD] >> test.py::test[schema-select_with_map-partial_read_schema-ForceBlocks] >> test.py::test[file-parse_file_in_select_as_int--Results] [GOOD] >> test.py::test[flatten_by-flatten_and_where--ForceBlocks] >> test.py::test[sampling-take_with_sampling-default.txt-Results] [GOOD] >> test.py::test[schema-def_values--ForceBlocks] >> test.py::test[window-rank/opt--Results] [GOOD] >> test.py::test[window-win_by_all_percentile_interval-default.txt-ForceBlocks] >> test.py::test[select-shift_columns-default.txt-Results] [GOOD] >> test.py::test[select-table_content_with_tmp_folder--ForceBlocks] >> test.py::test[window-full/session_incompat_sort--ForceBlocks] [GOOD] >> test.py::test[window-full/session_incompat_sort--Results] >> 
test.py::test[produce-discard_process_with_lambda-default.txt-ForceBlocks] |86.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[pg-tpcds-q63-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q01-default.txt-Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread--ForceBlocks] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread--Results] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread_fail--ForceBlocks] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread_fail--Results] [SKIPPED] >> test.py::test[produce-process_sorted_multi_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_sorted_multi_out--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort_stream--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort_stream--Results] [SKIPPED] >> test.py::test[result_types-containers-default.txt-ForceBlocks] |86.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[join-mapjoin_on_tablerecord--Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align3-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_align3-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_per_link-off-ForceBlocks] >> test.py::test[aggregate-table_row_aggregation-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-table_row_aggregation-default.txt-Results] >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt-Results] >> test.py::test[action-discard-default.txt-Results] [GOOD] >> test.py::test[action-eval_input_output_table--ForceBlocks] >> test.py::test[join-pullup_left_semi--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_left-off-ForceBlocks] >> test.py::test[type_v3-ignore_v3_hint-protofield-Results] [GOOD] >> test.py::test[view-init_view_after_eval-default.txt-ForceBlocks] >> test.py::test[aggregate-avg_and_sum-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-avg_and_sum-default.txt-Results] >> test.py::test[insert-append--ForceBlocks] [GOOD] >> test.py::test[insert-append--Results] >> test.py::test[window-current/ansi_current--ForceBlocks] [GOOD] >> test.py::test[window-current/ansi_current--Results] >> test.py::test[window-win_func_first_last--Results] [GOOD] >> test.py::test[optimizers-yql-7767_key_filter_with_view--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-7767_key_filter_with_view--Results] >> test.py::test[binding-table_concat_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_concat_binding-default.txt-Results] >> test.py::test[join-equi_join_three_simple--ForceBlocks] [GOOD] >> test.py::test[join-equi_join_three_simple--Results] >> test.py::test[join-mapjoin_with_anonymous-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_with_anonymous-off-Results] [SKIPPED] >> test.py::test[join-premap_map_inner--ForceBlocks] >> test.py::test[expr-non_persistable_insert_into_fail--ForceBlocks] >> test.py::test[optimizers-group_visit_lambdas--ForceBlocks] [GOOD] >> test.py::test[optimizers-group_visit_lambdas--Results] >> test.py::test[blocks-top_sort_one_asc--ForceBlocks] [GOOD] >> 
test.py::test[blocks-top_sort_one_asc--Results] >> test.py::test[pg-tpcds-q27-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q27-default.txt-Results] >> test.py::test[key_filter-contains_tuples-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_base-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_base-default.txt-Results] |86.4%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part18/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[weak_field-weak_field_real_col-default.txt-ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_real_col-default.txt-Results] >> test.py::test[dq-precompute_parallel_mix--ForceBlocks] [GOOD] >> test.py::test[dq-precompute_parallel_mix--Results] [SKIPPED] >> test.py::test[dq-wrong_script_segf--ForceBlocks] [SKIPPED] >> test.py::test[dq-wrong_script_segf--Results] [SKIPPED] >> test.py::test[epochs-write_and_use_in_same_epoch--ForceBlocks] >> test.py::test[insert-append--Results] [GOOD] >> test.py::test[join-alias_where_group--ForceBlocks] >> test.py::test[aggregate-table_row_aggregation-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-ForceBlocks] >> test.py::test[hor_join-group_sampling--ForceBlocks] [GOOD] >> test.py::test[hor_join-group_sampling--Results] >> test.py::test[binding-table_concat_binding-default.txt-Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o2o-off-ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint16--ForceBlocks] >> test.py::test[order_by-literal_take_zero_sort--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_take_zero_sort--Results] |86.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[window-win_func_first_last--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o2o-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_left_null_column--ForceBlocks] >> test.py::test[blocks-combine_all_minmax_double--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_minmax_double--Results] >> test.py::test[aggregate-avg_and_sum-default.txt-Results] [GOOD] >> test.py::test[aggregate-error_type--ForceBlocks] >> test.py::test[blocks-pg_to_interval--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_to_interval--Results] >> test.py::test[window-win_over_few_partitions_other--ForceBlocks] [GOOD] >> test.py::test[window-win_over_few_partitions_other--Results] >> test.py::test[pg-tpcds-q27-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q52-default.txt-ForceBlocks] >> test.py::test[schema-append_to_desc_with_remap--ForceBlocks] [GOOD] >> test.py::test[schema-append_to_desc_with_remap--Results] |86.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[key_filter-contains_tuples-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-non_persistable_insert_into_fail--ForceBlocks] [GOOD] >> test.py::test[expr-non_persistable_insert_into_fail--Results] [GOOD] >> test.py::test[file-where_key_in_file_content--ForceBlocks] >> test.py::test[simple_columns-simple_columns_base-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_disable-default.txt-ForceBlocks] >> test.py::test[schema-select_with_map-partial_read_schema-ForceBlocks] [GOOD] >> test.py::test[schema-select_with_map-partial_read_schema-Results] >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt-Results] [GOOD] >> 
test.py::test[aggregate-group_by_rollup_column_ref--ForceBlocks] >> test.py::test[optimizers-yql-7767_key_filter_with_view--Results] [GOOD] >> test.py::test[order_by-assume_cut_prefix--ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_agg--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_one_asc--Results] [GOOD] >> test.py::test[case-case_when_then-default.txt-ForceBlocks] >> test.py::test[schema-def_values--ForceBlocks] [GOOD] >> test.py::test[schema-def_values--Results] >> test.py::test[aggregate-group_by_ru_join_simple--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple--Results] >> test.py::test[weak_field-weak_field_real_col-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_strict--ForceBlocks] >> test.py::test[join-anyjoin_common_nodup--ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_common_nodup--Results] >> test.py::test[order_by-literal_take_zero_sort--Results] [GOOD] >> test.py::test[optimizers-group_visit_lambdas--Results] [GOOD] >> test.py::test[order_by-order_by_value_desc-default.txt-ForceBlocks] >> test.py::test[optimizers-remove_keep_sorted_setting--ForceBlocks] >> test.py::test[limit-insert_with_limit--ForceBlocks] [GOOD] >> test.py::test[limit-insert_with_limit--Results] >> test.py::test[flatten_by-flatten_and_where--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_and_where--Results] >> test.py::test[window-current/ansi_current--Results] [GOOD] >> test.py::test[window-full/noncompact_with_tablerow--ForceBlocks] >> test.py::test[lineage-window_session-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-flatmap_with_non_struct_out--Results] [SKIPPED] >> test.py::test[optimizers-nonselected_direct_row--Results] >> test.py::test[produce-discard_process_with_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-discard_process_with_lambda-default.txt-Results] >> test.py::test[blocks-combine_all_minmax_double--Results] [GOOD] >> test.py::test[blocks-date_equals_scalar--ForceBlocks] >> test.py::test[select-table_content_with_tmp_folder--ForceBlocks] [GOOD] >> test.py::test[select-table_content_with_tmp_folder--Results] >> test.py::test[blocks-pg_to_interval--Results] [GOOD] >> test.py::test[blocks-sort_two_asc--ForceBlocks] >> test.py::test[join-yql-8125--ForceBlocks] [GOOD] >> test.py::test[join-yql-8125--Results] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-Results] >> test.py::test[window-full/session_incompat_sort--Results] [GOOD] >> test.py::test[window-win_expr_bounds--ForceBlocks] >> test.py::test[join-mergejoin_force_per_link-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_per_link-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort-off-ForceBlocks] >> test.py::test[join-equi_join_three_simple--Results] [GOOD] >> test.py::test[join-equi_join_three_simple-off-ForceBlocks] >> test.py::test[schema-select_with_map-partial_read_schema-Results] [GOOD] >> test.py::test[schema-select_yamr_fields--ForceBlocks] >> test.py::test[schema-append_to_desc_with_remap--Results] [GOOD] >> test.py::test[schema-def_values_job--ForceBlocks] >> test.py::test[aggregate-error_type--ForceBlocks] [GOOD] >> test.py::test[aggregate-error_type--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_and_having--ForceBlocks] |86.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> 
test.py::test[aggregate-group_by_ru_join_agg--ForceBlocks] [GOOD] >> test.py::test[hor_join-group_sampling--Results] [GOOD] >> test.py::test[hor_join-group_yamr--ForceBlocks] >> test.py::test[view-init_view_after_eval-default.txt-ForceBlocks] [GOOD] >> test.py::test[view-init_view_after_eval-default.txt-Results] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-Results] >> test.py::test[schema-def_values--Results] [GOOD] >> test.py::test[schema-diffrerent_schemas--ForceBlocks] >> test.py::test[join-pushdown_filter_over_left-off-ForceBlocks] [GOOD] >> test.py::test[join-pushdown_filter_over_left-off-Results] [SKIPPED] >> test.py::test[join-three_equalities-off-ForceBlocks] >> test.py::test[action-eval_input_output_table--ForceBlocks] [GOOD] >> test.py::test[action-eval_input_output_table--Results] >> test.py::test[window-win_by_all_percentile_interval-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_by_all_percentile_interval-default.txt-Results] >> test.py::test[limit-insert_with_limit--Results] [GOOD] >> test.py::test[lineage-error_type--ForceBlocks] [SKIPPED] >> test.py::test[lineage-error_type--Results] [SKIPPED] >> test.py::test[lineage-select_field-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_field-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_nested_table_row-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_nested_table_row-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-sort_force--ForceBlocks] [SKIPPED] >> test.py::test[multicluster-sort_force--Results] [SKIPPED] >> test.py::test[optimizers-reduce_with_aux_sort_column--ForceBlocks] >> test.py::test[join-premap_map_inner--ForceBlocks] [GOOD] >> test.py::test[join-premap_map_inner--Results] >> test.py::test[result_types-containers-default.txt-ForceBlocks] [GOOD] >> test.py::test[result_types-containers-default.txt-Results] >> test.py::test[simple_columns-simple_columns_base_fail--Results] >> test.py::test[window-win_over_few_partitions_other--Results] [GOOD] >> test.py::test[produce-discard_process_with_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-discard_reduce_lambda--ForceBlocks] [SKIPPED] >> test.py::test[produce-discard_reduce_lambda--Results] [SKIPPED] >> test.py::test[produce-process_and_filter-default.txt-ForceBlocks] >> test.py::test[flatten_by-flatten_and_where--Results] [GOOD] >> test.py::test[flatten_by-flatten_expr_struct-default.txt-ForceBlocks] >> test.py::test[select-table_content_with_tmp_folder--Results] [GOOD] >> test.py::test[select-trivial_group_by-default.txt-ForceBlocks] >> test.py::test[blocks-add_uint16--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint16--Results] >> test.py::test[pg-tpcds-q52-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q52-default.txt-Results] >> test.py::test[blocks-combine_hashed_avg--Results] >> test.py::test[blocks-date_equals--ForceBlocks] [GOOD] >> test.py::test[blocks-date_equals--Results] >> test.py::test[join-alias_where_group--ForceBlocks] [GOOD] >> test.py::test[join-alias_where_group--Results] >> test.py::test[action-eval_input_output_table--Results] [GOOD] >> test.py::test[aggr_factory-boolor-default.txt-ForceBlocks] >> test.py::test[view-init_view_after_eval-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_long_fields--ForceBlocks] >> test.py::test[type_v3-json--Results] ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} 
ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[window-win_over_few_partitions_other--Results] [GOOD] Test command err: 127.0.0.1 - - [05/May/2025 03:10:13] "GET /nested_library.sql.txt HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 03:10:15] "GET /nested_library.sql.txt HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 03:10:16] "GET /nested_library.sql.txt HTTP/1.1" 200 - >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_disable-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_disable-default.txt-Results] >> test.py::test[file-where_key_in_file_content--ForceBlocks] [GOOD] >> test.py::test[file-where_key_in_file_content--Results] >> test.py::test[join-mapjoin_left_null_column--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_left_null_column--Results] >> test.py::test[join-join_comp_common_table--Results] [GOOD] >> test.py::test[join-join_without_column-off-ForceBlocks] >> test.py::test[join-premap_map_inner--Results] [GOOD] >> test.py::test[join-premap_map_inner-off-ForceBlocks] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-sum_if-default.txt-ForceBlocks] >> test.py::test[result_types-containers-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_expr-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_mul_gb_ru--ForceBlocks] >> test.py::test[case-case_when_then-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q52-default.txt-Results] [GOOD] >> test.py::test[case-case_when_then-default.txt-Results] >> test.py::test[order_by-assume_cut_prefix--ForceBlocks] [GOOD] >> test.py::test[order_by-assume_cut_prefix--Results] >> test.py::test[pg-tpcds-q56-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_strict--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_strict--Results] >> test.py::test[blocks-add_uint16--Results] [GOOD] >> test.py::test[blocks-block_output_various_types--ForceBlocks] >> test.py::test[simple_columns-simple_columns_base_fail--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_all-default.txt-Results] >> test.py::test[blocks-block_output_various_types--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_output_various_types--Results] [SKIPPED] >> test.py::test[blocks-coalesce_ints--ForceBlocks] >> test.py::test[order_by-order_by_value_desc-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-yql-8125--Results] [GOOD] >> test.py::test[order_by-order_by_value_desc-default.txt-Results] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_simple--Results] [GOOD] >> test.py::test[bigdate-table_arithmetic-default.txt-ForceBlocks] >> test.py::test[optimizers-nonselected_direct_row--Results] [GOOD] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--Results] [SKIPPED] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_disable-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt-ForceBlocks] >> test.py::test[window-win_by_all_percentile_interval-default.txt-Results] [GOOD] >> test.py::test[window-win_func_aggr_stat--ForceBlocks] >> test.py::test[schema-select_yamr_fields--ForceBlocks] [GOOD] >> 
test.py::test[schema-select_yamr_fields--Results] >> test.py::test[blocks-sort_two_asc--ForceBlocks] [GOOD] >> test.py::test[blocks-sort_two_asc--Results] >> test.py::test[hor_join-group_yamr--ForceBlocks] [GOOD] >> test.py::test[hor_join-group_yamr--Results] >> test.py::test[schema-def_values_job--ForceBlocks] [GOOD] >> test.py::test[schema-def_values_job--Results] >> test.py::test[file-where_key_in_file_content--Results] [GOOD] >> test.py::test[hor_join-out_mem_limit-default.txt-ForceBlocks] >> test.py::test[optimizers-remove_keep_sorted_setting--ForceBlocks] [GOOD] >> test.py::test[optimizers-remove_keep_sorted_setting--Results] >> test.py::test[order_by-assume_cut_prefix--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_and_having--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tuple-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_expr_and_having--Results] >> test.py::test[join-equi_join_three_simple-off-ForceBlocks] [GOOD] >> test.py::test[join-equi_join_three_simple-off-Results] [SKIPPED] >> test.py::test[join-full_equal_null--ForceBlocks] >> test.py::test[case-case_when_then-default.txt-Results] [GOOD] >> test.py::test[column_group-hint-single-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint-single-Results] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_append_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_append_fail--Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail4--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail4--Results] [SKIPPED] >> test.py::test[column_order-align_publish--ForceBlocks] >> test.py::test[schema-select_yamr_fields--Results] [GOOD] >> test.py::test[select-if-default.txt-ForceBlocks] >> test.py::test[join-equi_join_two_mult_keys-off-Results] [SKIPPED] >> test.py::test[join-full_equal_null-off-Results] [SKIPPED] >> test.py::test[join-join_key_cmp_udf-off-Results] [SKIPPED] >> test.py::test[join-join_with_duplicate_keys_on_sorted--Results] >> test.py::test[weak_field-weak_field_strict--Results] [GOOD] >> test.py::test[window-current/session--ForceBlocks] >> test.py::test[action-dep_world_quote_code-default.txt-Results] >> test.py::test[schema-diffrerent_schemas--ForceBlocks] [GOOD] >> test.py::test[schema-diffrerent_schemas--Results] >> test.py::test[join-mergejoin_narrows_output_sort-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_narrows_output_sort-off-Results] [SKIPPED] >> test.py::test[join-premap_common_cross--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_column_ref--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref--Results] >> test.py::test[window-win_expr_bounds--ForceBlocks] [GOOD] >> test.py::test[window-win_expr_bounds--Results] >> test.py::test[join-mapjoin_left_null_column--Results] [GOOD] >> test.py::test[join-mapjoin_left_null_column-off-ForceBlocks] >> test.py::test[join-alias_where_group--Results] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--ForceBlocks] >> test.py::test[order_by-order_by_value_desc-default.txt-Results] [GOOD] >> test.py::test[pg-doubles_search_path-default.txt-ForceBlocks] >> test.py::test[epochs-write_and_use_in_same_epoch--ForceBlocks] [GOOD] >> test.py::test[epochs-write_and_use_in_same_epoch--Results] >> test.py::test[hor_join-group_yamr--Results] [GOOD] >> test.py::test[hor_join-out_table_record-default.txt-ForceBlocks] >> 
test.py::test[window-full/noncompact_with_tablerow--ForceBlocks] [GOOD] >> test.py::test[window-full/noncompact_with_tablerow--Results] >> test.py::test[produce-process_and_filter-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_and_filter-default.txt-Results] >> test.py::test[blocks-sort_two_asc--Results] [GOOD] >> test.py::test[blocks-top_sort_two_asc--ForceBlocks] >> test.py::test[join-three_equalities-off-ForceBlocks] [GOOD] >> test.py::test[join-three_equalities-off-Results] [SKIPPED] >> test.py::test[join-yql_465-off-ForceBlocks] >> test.py::test[schema-def_values_job--Results] [GOOD] >> test.py::test[schema-select_all-row_spec-ForceBlocks] >> test.py::test[join-anyjoin_common_nodup--Results] [GOOD] >> test.py::test[join-cbo_4tables--ForceBlocks] [SKIPPED] >> test.py::test[join-cbo_4tables--Results] [SKIPPED] >> test.py::test[join-equi_join_three_asterisk_eval--ForceBlocks] >> test.py::test[select-trivial_group_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-trivial_group_by-default.txt-Results] >> test.py::test[schema-diffrerent_schemas--Results] [GOOD] >> test.py::test[schema-row_spec_with_default_values--ForceBlocks] |86.5%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part12/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[weak_field-weak_field_long_fields--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_long_fields--Results] >> test.py::test[optimizers-reduce_with_aux_sort_column--ForceBlocks] [GOOD] >> test.py::test[optimizers-reduce_with_aux_sort_column--Results] >> test.py::test[aggregate-group_by_expr_and_having--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_with_filter--ForceBlocks] >> test.py::test[type_v3-json--Results] [GOOD] >> test.py::test[type_v3-non_strict--Results] |86.5%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part17/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[expr-empty_iterator--Results] >> test.py::test[produce-process_and_filter-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_lambda_outstream-default.txt-ForceBlocks] >> test.py::test[flatten_by-flatten_expr_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-remove_keep_sorted_setting--Results] [GOOD] >> test.py::test[optimizers-unordered_over_sort--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-unordered_over_sort--Results] [SKIPPED] >> test.py::test[optimizers-yql-2171_aggregate_desc_sort_and_extract--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-2171_aggregate_desc_sort_and_extract--Results] [SKIPPED] >> test.py::test[order_by-literal_single_item_sort--ForceBlocks] >> test.py::test[join-premap_map_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_map_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_nonseq_flatmap-off-ForceBlocks] >> test.py::test[flatten_by-flatten_expr_struct-default.txt-Results] >> test.py::test[window-win_expr_bounds--Results] [GOOD] >> test.py::test[window-win_func_into_udf--ForceBlocks] >> test.py::test[blocks-coalesce_ints--ForceBlocks] [GOOD] >> test.py::test[blocks-coalesce_ints--Results] >> test.py::test[epochs-write_and_use_in_same_epoch--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q56-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q56-default.txt-Results] >> test.py::test[weak_field-weak_field_long_fields--Results] [GOOD] >> test.py::test[window-full/aggregations_leadlag_compact--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_column_ref--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-ForceBlocks] >> test.py::test[sampling-bind_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_expr-default.txt-Results] >> test.py::test[join-join_without_column-off-ForceBlocks] [GOOD] >> test.py::test[join-join_without_column-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_subq--ForceBlocks] >> test.py::test[aggr_factory-boolor-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-boolor-default.txt-Results] >> test.py::test[blocks-date_equals--Results] [GOOD] >> test.py::test[blocks-date_greater_or_equal_scalar--ForceBlocks] >> test.py::test[aggr_factory-sum_if-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-sum_if-default.txt-Results] >> test.py::test[optimizers-reduce_with_aux_sort_column--Results] [GOOD] >> test.py::test[optimizers-sorted_scalar_content--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-sorted_scalar_content--Results] [SKIPPED] >> test.py::test[optimizers-sorted_sql_in--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-sorted_sql_in--Results] >> test.py::test[select-trivial_group_by-default.txt-Results] [GOOD] >> test.py::test[select-trivial_where-one-ForceBlocks] >> test.py::test[optimizers-sorted_sql_in--Results] [SKIPPED] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-ForceBlocks] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt-Results] >> test.py::test[order_by-order_by_tuple-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[order_by-order_by_tuple-default.txt-Results] >> test.py::test[join-full_equal_null--ForceBlocks] [GOOD] >> test.py::test[join-full_equal_null--Results] >> test.py::test[select-if-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-if-default.txt-Results] >> test.py::test[schema-select_all-row_spec_extra_sort-Results] >> test.py::test[window-full/noncompact_with_tablerow--Results] [GOOD] >> test.py::test[window-full/session_aliases_compact--ForceBlocks] >> test.py::test[aggregate-group_by_mul_gb_ru--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_mul_gb_ru--Results] >> test.py::test[pg-tpcds-q56-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q74-default.txt-ForceBlocks] >> test.py::test[blocks-combine_hashed_avg--Results] [GOOD] >> test.py::test[blocks-combine_hashed_count--Results] >> test.py::test[blocks-coalesce_ints--Results] [GOOD] >> test.py::test[flatten_by-flatten_expr_struct-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_list--ForceBlocks] >> test.py::test[sampling-bind_expr-default.txt-Results] [GOOD] >> test.py::test[schema-concat--ForceBlocks] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-Results] >> test.py::test[blocks-combine_all_avg_filter_opt--ForceBlocks] >> test.py::test[join-premap_common_cross--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_cross--Results] >> test.py::test[window-win_func_aggr_stat--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_stat--Results] >> test.py::test[column_order-align_publish--ForceBlocks] [GOOD] >> test.py::test[column_order-align_publish--Results] >> test.py::test[action-dep_world_quote_code-default.txt-Results] [GOOD] >> test.py::test[action-eval_atom_wrong_type_expr--Results] [SKIPPED] >> test.py::test[action-eval_each_input_table-default.txt-Results] >> test.py::test[hor_join-out_table_record-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-out_table_record-default.txt-Results] >> test.py::test[pg-doubles_search_path-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-doubles_search_path-default.txt-Results] >> test.py::test[join-mapjoin_left_null_column-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_left_null_column-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_very_complex_type-off-ForceBlocks] >> test.py::test[schema-row_spec_with_default_values--ForceBlocks] [GOOD] >> test.py::test[schema-row_spec_with_default_values--Results] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Results] [GOOD] >> test.py::test[optimizers-yql-6008_limit_after_map--Results] >> test.py::test[schema-select_all-row_spec-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec-Results] >> test.py::test[join-yql_465-off-ForceBlocks] [GOOD] >> test.py::test[join-yql_465-off-Results] [SKIPPED] >> test.py::test[key_filter-is_null_with_condition--ForceBlocks] >> test.py::test[order_by-order_by_tuple-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_udf_duo--ForceBlocks] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_all-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt-Results] >> test.py::test[select-if-default.txt-Results] [GOOD] >> test.py::test[select-logical_ops-default.txt-ForceBlocks] >> 
test.py::test[aggr_factory-sum_if-default.txt-Results] [GOOD] >> test.py::test[aggregate-GroupByOneField--ForceBlocks] >> test.py::test[hor_join-out_mem_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-out_mem_limit-default.txt-Results] >> test.py::test[blocks-top_sort_two_asc--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_two_asc--Results] >> test.py::test[type_v3-non_strict--Results] [GOOD] >> test.py::test[type_v3-replace_diff_layout--Results] >> test.py::test[join-full_equal_null--Results] [GOOD] >> test.py::test[join-inner_trivial_from_concat--ForceBlocks] >> test.py::test[join-anyjoin_merge_nodup--ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--Results] >> test.py::test[join-join_with_duplicate_keys_on_sorted--Results] [GOOD] >> test.py::test[join-join_without_column-off-Results] [SKIPPED] >> test.py::test[join-left_cast_to_string--Results] >> test.py::test[aggr_factory-boolor-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-max-default.txt-ForceBlocks] >> test.py::test[join-equi_join_three_asterisk_eval--ForceBlocks] [GOOD] >> test.py::test[join-equi_join_three_asterisk_eval--Results] >> test.py::test[bigdate-table_arithmetic-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_arithmetic-default.txt-Results] >> test.py::test[expr-empty_iterator--Results] [GOOD] >> test.py::test[expr-non_persistable_group_by_column_fail--Results] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_sorted_max_sorted_tables--ForceBlocks] [SKIPPED] >> test.py::test[order_by-literal_single_item_sort--ForceBlocks] [GOOD] >> test.py::test[table_range-concat_sorted_max_sorted_tables--Results] [SKIPPED] >> test.py::test[table_range-each_with_non_existing--ForceBlocks] >> test.py::test[order_by-literal_single_item_sort--Results] >> test.py::test[schema-select_all-row_spec-Results] [GOOD] >> test.py::test[schema-row_spec_with_default_values--Results] [GOOD] >> test.py::test[column_order-align_publish--Results] [GOOD] >> test.py::test[select-column_labels-default.txt-ForceBlocks] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Results] >> test.py::test[hor_join-out_table_record-default.txt-Results] [GOOD] >> test.py::test[hor_join-yql-6477_table_path-default.txt-ForceBlocks] >> test.py::test[column_order-select_action-default.txt-ForceBlocks] >> test.py::test[pg-doubles_search_path-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q32-default.txt-ForceBlocks] >> test.py::test[window-current/session--ForceBlocks] [GOOD] >> test.py::test[window-current/session--Results] >> test.py::test[join-premap_common_cross--Results] [GOOD] >> test.py::test[join-premap_common_inner_filter-off-ForceBlocks] >> test.py::test[select-trivial_where-one-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_with_filter--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_with_filter--Results] >> test.py::test[schema-select_all-row_spec_extra_sort-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] >> test.py::test[window-win_func_aggr_stat--Results] [GOOD] >> test.py::test[join-premap_nonseq_flatmap-off-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_lambda_outstream-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_lambda_outstream-default.txt-Results] >> test.py::test[join-premap_nonseq_flatmap-off-Results] 
>> test.py::test[select-trivial_where-one-Results] >> test.py::test[blocks-date_equals_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_equals_scalar--Results] >> test.py::test[join-premap_nonseq_flatmap-off-Results] [SKIPPED] >> test.py::test[join-yql-8125-off-ForceBlocks] >> test.py::test[join-lookupjoin_semi_subq--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_subq--Results] |86.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[schema-row_spec_with_default_values--Results] [GOOD] >> test.py::test[order_by-literal_single_item_sort--Results] [GOOD] >> test.py::test[order_by-order_by_missing_project_column-default.txt-ForceBlocks] >> test.py::test[blocks-top_sort_two_asc--Results] [GOOD] >> test.py::test[column_group-hint_empty_grp_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_empty_grp_fail--Results] [SKIPPED] >> test.py::test[column_order-select_distinct_star-default.txt-ForceBlocks] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Results] [GOOD] >> test.py::test[hor_join-runtime_dep-default.txt-ForceBlocks] >> test.py::test[action-eval_each_input_table-default.txt-Results] [GOOD] >> test.py::test[action-eval_filter--Results] >> test.py::test[expr-non_persistable_group_by_column_fail--Results] [GOOD] >> test.py::test[flatten_by-flatten_list--Results] >> test.py::test[pg-tpcds-q74-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q74-default.txt-Results] >> test.py::test[schema-concat--ForceBlocks] [GOOD] >> test.py::test[schema-concat--Results] >> test.py::test[window-win_func_into_udf--ForceBlocks] [GOOD] >> test.py::test[window-win_func_into_udf--Results] >> test.py::test[aggregate-group_by_mul_gb_ru--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--ForceBlocks] >> test.py::test[type_v3-replace_diff_layout--Results] [GOOD] >> test.py::test[udf-python_struct--Results] [SKIPPED] >> test.py::test[union_all-mix_map_and_read-default.txt-Results] >> test.py::test[select-trivial_where-one-Results] [GOOD] >> test.py::test[select-unlabeled--ForceBlocks] |86.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[window-win_func_aggr_stat--Results] [GOOD] >> test.py::test[flatten_by-flatten_list--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_list--Results] >> test.py::test[blocks-combine_all_avg_filter_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_avg_filter_opt--Results] >> test.py::test[optimizers-yql-6008_limit_after_map--Results] [GOOD] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--Results] >> test.py::test[produce-process_with_lambda_outstream-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Results] >> test.py::test[hor_join-out_mem_limit-default.txt-Results] [GOOD] >> test.py::test[hor_join-out_range-default.txt-ForceBlocks] >> test.py::test[key_filter-is_null_with_condition--ForceBlocks] [GOOD] >> test.py::test[key_filter-is_null_with_condition--Results] >> test.py::test[pg-tpcds-q74-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q92-default.txt-ForceBlocks] >> test.py::test[blocks-combine_hashed_count--Results] [GOOD] >> test.py::test[blocks-date_add_interval--Results] >> 
test.py::test[aggregate-GroupByOneField--ForceBlocks] [GOOD] >> test.py::test[aggregate-GroupByOneField--Results] >> test.py::test[schema-concat--Results] [GOOD] >> test.py::test[schema-insert-read_schema-ForceBlocks] >> test.py::test[order_by-order_by_udf_duo--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_udf_duo--Results] |86.5%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part17/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[join-lookupjoin_semi_subq--Results] [GOOD] >> test.py::test[join-mergejoin_big_primary-off-ForceBlocks] >> test.py::test[window-current/session--Results] [GOOD] >> test.py::test[window-full/noncompact_with_nulls_tuple_key--ForceBlocks] >> test.py::test[select-logical_ops-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-logical_ops-default.txt-Results] >> test.py::test[join-equi_join_three_asterisk_eval--Results] [GOOD] >> test.py::test[join-equi_join_two_mult_keys--ForceBlocks] >> test.py::test[window-full/aggregations_leadlag_compact--ForceBlocks] [GOOD] >> test.py::test[window-full/aggregations_leadlag_compact--Results] >> test.py::test[join-inner_trivial_from_concat--ForceBlocks] [GOOD] >> test.py::test[join-inner_trivial_from_concat--Results] >> test.py::test[aggregate-group_by_rollup_with_filter--Results] [GOOD] >> test.py::test[select-column_labels-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-column_labels-default.txt-Results] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-ForceBlocks] >> test.py::test[window-win_func_into_udf--Results] [GOOD] >> test.py::test[window-yql-14479-default.txt-ForceBlocks] >> test.py::test[table_range-each_with_non_existing--ForceBlocks] [GOOD] >> test.py::test[table_range-each_with_non_existing--Results] >> test.py::test[bigdate-table_arithmetic-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-Results] >> test.py::test[join-anyjoin_merge_nodup--Results] [GOOD] >> test.py::test[join-bush_in_in_in--ForceBlocks] >> test.py::test[pg-tpcds-q32-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q32-default.txt-Results] >> test.py::test[hor_join-yql-6477_table_path-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-yql-6477_table_path-default.txt-Results] >> test.py::test[key_filter-string_with_ff-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-Results] >> test.py::test[schema-yamred_dsv_select_from_dict--Results] >> test.py::test[key_filter-is_null_with_condition--Results] [GOOD] >> test.py::test[action-eval_filter--Results] [GOOD] >> test.py::test[action-mixed_eval_typeof_world1--Results] >> test.py::test[join-mapjoin_on_very_complex_type-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_very_complex_type-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_any_no_join_reduce--ForceBlocks] >> test.py::test[flatten_by-flatten_list--Results] [GOOD] >> test.py::test[flatten_by-flatten_two_fields--ForceBlocks] |86.5%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part12/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[blocks-combine_all_avg_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_max_filter--ForceBlocks] >> test.py::test[join-left_cast_to_string--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_null--Results] >> test.py::test[order_by-order_by_udf_duo--Results] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-ForceBlocks] >> test.py::test[aggr_factory-max-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-max-default.txt-Results] >> test.py::test[select-logical_ops-default.txt-Results] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-ForceBlocks] >> test.py::test[aggregate-GroupByOneField--Results] [GOOD] >> test.py::test[aggregate-dedup_state_keys--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-Results] >> test.py::test[join-premap_common_inner_filter-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner_filter-off-Results] [SKIPPED] >> test.py::test[join-pullup_cross--ForceBlocks] >> test.py::test[table_range-each_with_non_existing--Results] [GOOD] >> test.py::test[table_range-range_over_filter--ForceBlocks] |86.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[bigdate-table_arithmetic-default.txt-Results] [GOOD] >> test.py::test[select-column_labels-default.txt-Results] [GOOD] >> test.py::test[select-struct_members-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-15210_sqlin--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-15210_sqlin--Results] [SKIPPED] >> test.py::test[optimizers-yql-3455_filter_sorted--ForceBlocks] >> test.py::test[column_order-select_distinct_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_distinct_star-default.txt-Results] >> test.py::test[pg-tpcds-q32-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q36-default.txt-ForceBlocks] >> test.py::test[column_order-select_action-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_action-default.txt-Results] |86.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[key_filter-is_null_with_condition--Results] [GOOD] >> test.py::test[window-full/session_aliases_compact--ForceBlocks] [GOOD] >> test.py::test[window-full/session_aliases_compact--Results] >> test.py::test[join-inner_trivial_from_concat--Results] [GOOD] >> test.py::test[join-inner_trivial_from_concat-off-ForceBlocks] >> test.py::test[blocks-date_equals_scalar--Results] [GOOD] >> test.py::test[blocks-date_greater_or_equal--ForceBlocks] >> test.py::test[hor_join-yql-6477_table_path-default.txt-Results] [GOOD] >> test.py::test[in-in_tablesource_to_equijoin--ForceBlocks] >> test.py::test[key_filter-string_with_ff-default.txt-Results] [GOOD] >> test.py::test[hor_join-runtime_dep-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-unlabeled--ForceBlocks] [GOOD] >> test.py::test[action-mixed_eval_typeof_world1--Results] [GOOD] >> test.py::test[aggr_factory-every-default.txt-Results] >> test.py::test[pg-tpcds-q92-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q92-default.txt-Results] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-Results] [GOOD] >> 
test.py::test[bigdate-table_arithmetic_narrow-default.txt-ForceBlocks] >> test.py::test[union_all-mix_map_and_read-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt-Results] >> test.py::test[produce-process_with_udf-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_udf-default.txt-Results] |86.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[hor_join-runtime_dep-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_distinct_star-default.txt-Results] [GOOD] >> test.py::test[dq-read_cost_native-default.txt-ForceBlocks] >> test.py::test[column_order-select_action-default.txt-Results] [GOOD] >> test.py::test[column_order-select_sample-default.txt-ForceBlocks] >> test.py::test[flatten_by-flatten_list--Results] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--Results] [GOOD] >> test.py::test[select-anon_clash--Results] >> test.py::test[hor_join-fuse_multi_outs2-outlimit-Results] [SKIPPED] >> test.py::test[hor_join-fuse_multi_usage--Results] [SKIPPED] |86.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[key_filter-string_with_ff-default.txt-Results] [GOOD] |86.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[select-unlabeled--ForceBlocks] [GOOD] >> test.py::test[join-equi_join_two_mult_keys--ForceBlocks] [GOOD] >> test.py::test[join-equi_join_two_mult_keys--Results] >> test.py::test[hor_join-out_range-default.txt-Results] >> test.py::test[aggr_factory-max-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-variance-default.txt-ForceBlocks] >> test.py::test[join-yql-8125-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-8125-off-Results] [SKIPPED] >> test.py::test[key_filter-contains-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_big_primary-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_big_primary-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align1--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_align1--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_reverse_key_order-off-ForceBlocks] >> test.py::test[window-full/aggregations_leadlag_compact--Results] [GOOD] >> test.py::test[window-full/session_aliases--ForceBlocks] >> test.py::test[order_by-order_by_missing_project_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Results] >> test.py::test[lineage-join_as_struct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-list_literal3-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_member_struct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-sort_force--Results] [SKIPPED] >> test.py::test[pg-tpcds-q92-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q07-default.txt-ForceBlocks] >> test.py::test[schema-insert-read_schema-ForceBlocks] [GOOD] >> test.py::test[schema-insert-read_schema-Results] >> test.py::test[optimizers-aggregate_over_aggregate--Results] >> test.py::test[hor_join-out_range-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-out_range-default.txt-Results] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--Results] [GOOD] >> test.py::test[order_by-SortByTwoFields--Results] >> 
test.py::test[produce-process_with_udf-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_lambda_list_table--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_lambda_list_table--Results] [SKIPPED] >> test.py::test[sampling-bind_join_right-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_any_no_join_reduce--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_any_no_join_reduce--Results] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--ForceBlocks] [GOOD] >> test.py::test[window-full/session_aliases_compact--Results] [GOOD] >> test.py::test[window-presort_window_partition_by_table-default.txt-ForceBlocks] >> test.py::test[join-selfjoin_on_sorted_with_filter--Results] >> test.py::test[join-bush_in_in_in--ForceBlocks] [GOOD] >> test.py::test[join-bush_in_in_in--Results] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted--ForceBlocks] >> test.py::test[blocks-combine_all_max_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_max_filter--Results] >> test.py::test[aggregate-dedup_state_keys--ForceBlocks] [GOOD] >> test.py::test[aggregate-dedup_state_keys--Results] >> test.py::test[select-struct_members-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-struct_members-default.txt-Results] >> test.py::test[select-anon_clash--Results] [GOOD] >> test.py::test[select-corr_name_in_select-default.txt-Results] >> test.py::test[order_by-presort_order_by_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-Results] >> test.py::test[table_range-range_over_filter--ForceBlocks] [GOOD] >> test.py::test[table_range-range_over_filter--Results] >> test.py::test[schema-insert-read_schema-Results] [GOOD] >> test.py::test[window-yql-14479-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-yql-14479-default.txt-Results] >> test.py::test[flatten_by-flatten_two_fields--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_two_fields--Results] >> test.py::test[blocks-date_add_interval--Results] [GOOD] >> test.py::test[blocks-date_less_scalar--Results] >> test.py::test[join-equi_join_two_mult_keys--Results] [GOOD] |86.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--ForceBlocks] [GOOD] >> test.py::test[join-pullup_cross--ForceBlocks] [GOOD] >> test.py::test[join-pullup_cross--Results] >> test.py::test[select-multi_source_issue-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-Results] >> test.py::test[join-equi_join_two_mult_keys-off-ForceBlocks] >> test.py::test[join-inner_trivial_from_concat-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_trivial_from_concat-off-Results] [SKIPPED] >> test.py::test[join-join_without_column--ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-Results] >> test.py::test[window-full/noncompact_with_nulls_tuple_key--ForceBlocks] [GOOD] >> test.py::test[window-full/noncompact_with_nulls_tuple_key--Results] >> test.py::test[optimizers-yql-3455_filter_sorted--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-3455_filter_sorted--Results] |86.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[schema-insert-read_schema-Results] [GOOD] >> 
test.py::test[blocks-combine_all_max_filter--Results] [GOOD] >> test.py::test[blocks-combine_hashed_some--ForceBlocks] >> test.py::test[table_range-range_over_filter--Results] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_trivial-default.txt-Results] >> test.py::test[select-struct_members-default.txt-Results] [GOOD] >> test.py::test[select-where_not_null--ForceBlocks] >> test.py::test[dq-read_cost_native-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-read_cost_native-default.txt-Results] [SKIPPED] >> test.py::test[epochs-use_sorted_by_complex_type--ForceBlocks] >> test.py::test[join-mergejoin_any_no_join_reduce--Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary--ForceBlocks] >> test.py::test[order_by-presort_order_by_table-default.txt-Results] [GOOD] >> test.py::test[pg-select_from_columns_star-default.txt-ForceBlocks] >> test.py::test[aggregate-dedup_state_keys--Results] [GOOD] >> test.py::test[aggregate-group_by_cube_duo--ForceBlocks] >> test.py::test[select-multi_source_issue-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Results] [GOOD] >> test.py::test[order_by-sort_with_take--ForceBlocks] >> test.py::test[sampling-zero_percentage--Results] |86.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[table_range-range_over_filter--Results] [GOOD] >> test.py::test[column_order-select_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_sample-default.txt-Results] >> test.py::test[order_by-SortByTwoFields--Results] [GOOD] >> test.py::test[order_by-SortByTwoFieldsDesc--Results] >> test.py::test[hor_join-out_range-default.txt-Results] [GOOD] >> test.py::test[in-in_sorted--ForceBlocks] >> test.py::test[key_filter-contains-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-contains-default.txt-Results] >> test.py::test[pg-tpcds-q62-default.txt-Results] >> test.py::test[flatten_by-flatten_two_fields--Results] [GOOD] >> test.py::test[in-in_with_tuple-default.txt-ForceBlocks] >> test.py::test[window-yql-14479-default.txt-Results] [GOOD] >> test.py::test[join-pullup_cross--Results] [GOOD] >> test.py::test[join-pullup_exclusion--ForceBlocks] >> test.py::test[join-mergejoin_with_reverse_key_order-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order-off-Results] [SKIPPED] >> test.py::test[join-nested_semi_join-off-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-Results] [GOOD] >> test.py::test[table_range-range_over_desc--Results] >> test.py::test[pg-tpcds-q36-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q36-default.txt-Results] >> test.py::test[join-selfjoin_on_sorted_with_filter--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate-Results] [SKIPPED] >> test.py::test[join-simple_columns_partial--Results] >> test.py::test[join-bush_in_in_in--Results] [GOOD] >> test.py::test[join-bush_in_in_in-off-ForceBlocks] >> test.py::test[optimizers-yql-3455_filter_sorted--Results] [GOOD] >> test.py::test[aggr_factory-every-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-median-default.txt-Results] >> test.py::test[sampling-bind_join_right-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_join_right-default.txt-Results] >> 
test.py::test[join-left_join_right_pushdown_null--Results] [GOOD] >> test.py::test[join-left_only_with_other--Results] >> test.py::test[select-corr_name_in_select-default.txt-Results] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-Results] >> test.py::test[aggregate-group_compact_sorted--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_compact_sorted--Results] ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[window-yql-14479-default.txt-Results] [GOOD] Test command err: 127.0.0.1 - - [05/May/2025 03:11:12] "GET /mylib.sql HTTP/1.1" 200 - >> test.py::test[column_order-select_sample-default.txt-Results] [GOOD] >> test.py::test[dq-mem_limit--ForceBlocks] [SKIPPED] >> test.py::test[dq-mem_limit--Results] [SKIPPED] >> test.py::test[pg-pg_column_case--Results] >> test.py::test[key_filter-contains-default.txt-Results] [GOOD] >> test.py::test[key_filter-datetime-default.txt-ForceBlocks] >> test.py::test[window-full/noncompact_with_nulls_tuple_key--Results] [GOOD] >> test.py::test[action-action_eval_cluster_and_table-default.txt-Results] >> test.py::test[optimizers-aggregate_over_aggregate--Results] [GOOD] >> test.py::test[optimizers-fuse_map_mapreduce_multi_input--Results] [SKIPPED] >> test.py::test[optimizers-yql-15210_sqlin--Results] [SKIPPED] >> test.py::test[order_by-assume_over_input--Results] >> test.py::test[insert-select_relabel-default.txt-Results] >> test.py::test[window-generic/aggregations_mixed--ForceBlocks] >> test.py::test[in-in_tablesource_to_equijoin--ForceBlocks] [GOOD] >> test.py::test[in-in_tablesource_to_equijoin--Results] >> test.py::test[aggr_factory-variance-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-variance-default.txt-Results] |86.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[optimizers-yql-3455_filter_sorted--Results] [GOOD] |86.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[dq-mem_limit--Results] [SKIPPED] >> test.py::test[union_all-union_all_trivial-default.txt-Results] [GOOD] >> test.py::test[view-all_from_view--Results] >> test.py::test[window-presort_window_partition_by_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-presort_window_partition_by_table-default.txt-Results] >> test.py::test[join-equi_join_two_mult_keys-off-ForceBlocks] [GOOD] >> test.py::test[join-equi_join_two_mult_keys-off-Results] [SKIPPED] >> test.py::test[join-flatten_columns1--ForceBlocks] >> test.py::test[pg-select_from_columns_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_from_columns_star-default.txt-Results] >> test.py::test[pg-tpch-q07-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q07-default.txt-Results] >> test.py::test[join-join_without_column--ForceBlocks] [GOOD] >> test.py::test[join-join_without_column--Results] >> test.py::test[blocks-combine_hashed_some--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_some--Results] >> test.py::test[sampling-zero_percentage--Results] [GOOD] >> test.py::test[schema-concat--Results] >> test.py::test[select-where_not_null--ForceBlocks] [GOOD] >> test.py::test[select-where_not_null--Results] >> test.py::test[aggregate-group_compact_sorted--Results] [GOOD] |86.6%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part1/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-Results] [GOOD] >> test.py::test[aggregate-histogram_cdf-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_session_aliases--ForceBlocks] >> test.py::test[join-mergejoin_choose_primary--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_choose_primary--Results] >> test.py::test[window-full/session_aliases--ForceBlocks] [GOOD] >> test.py::test[window-full/session_aliases--Results] >> test.py::test[pg-tpcds-q62-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt-Results] >> test.py::test[blocks-date_greater_or_equal--ForceBlocks] [GOOD] >> test.py::test[blocks-date_greater_or_equal--Results] >> test.py::test[sampling-bind_join_right-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_default-default.txt-ForceBlocks] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt-Results] >> test.py::test[pg-tpcds-q36-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q65-default.txt-ForceBlocks] >> test.py::test[blocks-date_greater_or_equal_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_greater_or_equal_scalar--Results] >> test.py::test[order_by-sort_with_take--ForceBlocks] [GOOD] >> test.py::test[order_by-sort_with_take--Results] >> test.py::test[pg-select_from_columns_star-default.txt-Results] [GOOD] >> test.py::test[pg-select_subquery-default.txt-ForceBlocks] >> test.py::test[order_by-SortByTwoFieldsDesc--Results] [GOOD] >> test.py::test[order_by-order_by_expr_mul_cols--Results] >> test.py::test[in-in_sorted--ForceBlocks] [GOOD] >> test.py::test[in-in_sorted--Results] >> test.py::test[hor_join-out_range-default.txt-Results] [GOOD] >> test.py::test[hor_join-row_num_per_sect--Results] >> test.py::test[count-boolean_count--Results] >> test.py::test[join-join_without_column--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access-off-ForceBlocks] >> test.py::test[in-in_with_tuple-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_with_tuple-default.txt-Results] >> test.py::test[select-where_not_null--Results] [GOOD] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-Results] >> test.py::test[blocks-combine_hashed_some--Results] [GOOD] >> test.py::test[blocks-compare_dates_floats_bools--ForceBlocks] [SKIPPED] >> test.py::test[blocks-compare_dates_floats_bools--Results] [SKIPPED] >> test.py::test[window-presort_window_partition_by_table-default.txt-Results] [GOOD] >> test.py::test[window-rank/unordered--ForceBlocks] >> test.py::test[table_range-range_over_desc--Results] [GOOD] >> test.py::test[table_range-range_over_like--Results] >> test.py::test[action-action_eval_cluster_and_table-default.txt-Results] [GOOD] >> test.py::test[action-discard-default.txt-Results] >> test.py::test[join-nested_semi_join-off-ForceBlocks] [GOOD] >> test.py::test[pg-pg_column_case--Results] [GOOD] >> test.py::test[pg-select_columnref2-default.txt-Results] >> test.py::test[join-pullup_exclusion--ForceBlocks] [GOOD] >> test.py::test[join-pullup_exclusion--Results] >> test.py::test[order_by-sort_with_take--Results] [GOOD] >> test.py::test[pg-aggregate_combine_all--ForceBlocks] >> test.py::test[epochs-use_sorted_by_complex_type--ForceBlocks] [GOOD] >> 
test.py::test[epochs-use_sorted_by_complex_type--Results] >> test.py::test[join-mergejoin_choose_primary--Results] [GOOD] >> test.py::test[blocks-finalize_hashed_keys--Results] >> test.py::test[order_by-assume_over_input--Results] [GOOD] >> test.py::test[order_by-literal_take_zero_sort--Results] |86.6%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part9/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[select-dict_with_few_keys-default.txt-Results] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-Results] >> test.py::test[aggr_factory-variance-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-ForceBlocks] >> test.py::test[view-all_from_view--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Results] >> test.py::test[insert-select_relabel-default.txt-Results] [GOOD] >> test.py::test[insert-select_with_sort_limit-default.txt-Results] |86.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[blocks-compare_dates_floats_bools--Results] [SKIPPED] >> test.py::test[aggregate-group_by_cube_duo--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_cube_duo--Results] >> test.py::test[join-bush_in_in_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_in_in_in-off-Results] [SKIPPED] >> test.py::test[join-commonjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-commonjoin_unused_keys--Results] [SKIPPED] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[join-opt_on_opt_side-off-Results] [SKIPPED] >> test.py::test[join-premap_common_semi--Results] >> test.py::test[in-in_with_tuple-default.txt-Results] [GOOD] >> test.py::test[insert-after_group_by-default.txt-ForceBlocks] |86.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[join-nested_semi_join-off-ForceBlocks] [GOOD] >> test.py::test[schema-concat--Results] [GOOD] >> test.py::test[schema-copy-schema-Results] >> test.py::test[join-flatten_columns1--ForceBlocks] [GOOD] >> test.py::test[join-flatten_columns1--Results] |86.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[join-mergejoin_choose_primary--Results] [GOOD] >> test.py::test[pg-tpch-q07-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q10-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q81-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-ForceBlocks] >> test.py::test[in-in_sorted--Results] [GOOD] >> test.py::test[insert-append_after_replace-default.txt-ForceBlocks] |86.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[join-commonjoin_unused_keys--Results] [SKIPPED] >> test.py::test[pg-tpch-q02-default.txt-Results] >> test.py::test[pg-tpcds-q65-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q65-default.txt-Results] >> test.py::test[sampling-subquery_default-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_subquery-default.txt-Results] >> test.py::test[epochs-use_sorted_by_complex_type--Results] [GOOD] >> test.py::test[expr-constraints_of--ForceBlocks] >> 
test.py::test[sampling-subquery_default-default.txt-Results] >> test.py::test[join-mapjoin_opt_vs_2xopt--Results] >> test.py::test[join-pullup_exclusion--Results] [GOOD] >> test.py::test[join-pullup_inner-off-ForceBlocks] >> test.py::test[table_range-range_over_like--Results] [GOOD] >> test.py::test[type_v3-append_diff_layout2--Results] [SKIPPED] >> test.py::test[type_v3-ignore_v3_hint--Results] >> test.py::test[join-left_only_with_other--Results] [GOOD] >> test.py::test[join-lookupjoin_not_selected--Results] >> test.py::test[window-full/session_aliases--Results] [GOOD] >> test.py::test[window-mixed/aggregations--ForceBlocks] >> test.py::test[pg-tpcds-q65-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_mul_gb_ru--Results] >> test.py::test[order_by-literal_take_zero_sort--Results] [GOOD] >> test.py::test[order_by-order_by_expr_over_sorted_table--Results] >> test.py::test[aggregate-group_by_session_aliases--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_aliases--Results] >> test.py::test[blocks-date_greater_or_equal--Results] [GOOD] >> test.py::test[blocks-date_greater_scalar--ForceBlocks] >> test.py::test[pg-select_subquery-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q43-default.txt-ForceBlocks] >> test.py::test[join-flatten_columns1--Results] [GOOD] >> test.py::test[join-flatten_columns1-off-ForceBlocks] >> test.py::test[sampling-subquery_default-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_filter-default.txt-ForceBlocks] >> test.py::test[select-multi_source_issue-default.txt-Results] [GOOD] >> test.py::test[select-optional_as_warn-default.txt-Results] >> test.py::test[order_by-order_by_expr_mul_cols--Results] [GOOD] >> test.py::test[order_by-order_by_expr_simple--Results] >> test.py::test[aggregate-histogram_cdf-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-histogram_cdf-default.txt-Results] >> test.py::test[weak_field-optimize_weak_fields_map--Results] [GOOD] >> test.py::test[insert-select_with_sort_limit-default.txt-Results] [GOOD] >> test.py::test[insert-two_input_tables--Results] >> test.py::test[weak_field-weak_field_join_condition--Results] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-3.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-4.test] >> test.py::test[count-boolean_count--Results] [GOOD] >> test.py::test[count-count_by_nulls--Results] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-aggregate_combine_all--ForceBlocks] [GOOD] >> test.py::test[pg-aggregate_combine_all--Results] >> test.py::test[join-join_without_correlation_and_struct_access-off-ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o--ForceBlocks] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt-Results] [GOOD] >> test.py::test[binding-table_regexp_strict_binding--ForceBlocks] |86.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[pg-tpcds-q65-default.txt-Results] [GOOD] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt-Results] >> test.py::test[aggr_factory-median-default.txt-Results] [GOOD] >> test.py::test[hor_join-row_num_per_sect--Results] [GOOD] >> test.py::test[hor_join-yield_off--Results] >> test.py::test[aggr_factory-mode-default.txt-Results] >> test.py::test[aggregate-group_by_cube_duo--Results] [GOOD] >> 
test.py::test[aggregate-group_by_cube_grouping--ForceBlocks] >> test.py::test[hor_join-yield_off--Results] [SKIPPED] >> test.py::test[in-in_tablesource_on_raw_list--Results] [SKIPPED] >> test.py::test[in-in_with_list_dict-default.txt-Results] >> test.py::test[schema-copy-schema-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_part-Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] >> test.py::test[window-rank/unordered--ForceBlocks] [GOOD] >> test.py::test[window-rank/unordered--Results] >> test.py::test[blocks-date_greater_or_equal_scalar--Results] [GOOD] >> test.py::test[blocks-date_less_or_equal--ForceBlocks] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-Results] >> test.py::test[pg-select_columnref2-default.txt-Results] [GOOD] >> test.py::test[pg-select_qstarref1-default.txt-Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] >> test.py::test[insert-after_group_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-after_group_by-default.txt-Results] >> test.py::test[in-in_tablesource_to_equijoin--Results] [GOOD] >> test.py::test[in-in_with_opt_tuple-default.txt-ForceBlocks] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[aggregate-group_by_session_aliases--Results] [GOOD] >> test.py::test[aggregate-group_by_session_extended--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] >> test.py::test[window-generic/aggregations_mixed--ForceBlocks] [GOOD] >> test.py::test[window-generic/aggregations_mixed--Results] |86.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part1/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[expr-constraints_of--ForceBlocks] [GOOD] >> test.py::test[expr-constraints_of--Results] >> test.py::test[blocks-finalize_hashed_keys--Results] [GOOD] >> test.py::test[blocks-interval_mul--Results] >> test.py::test[action-discard-default.txt-Results] [GOOD] >> test.py::test[action-eval_folder_via_file_in_job--Results] >> test.py::test[pg-aggregate_combine_all--Results] [GOOD] >> test.py::test[pg-select_from_columns-default.txt-ForceBlocks] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt-ForceBlocks] >> test.py::test[join-simple_columns_partial--Results] [GOOD] >> test.py::test[join-star_join_semionly--Results] >> test.py::test[join-premap_common_semi--Results] [GOOD] >> test.py::test[join-premap_merge_with_remap-off-Results] [SKIPPED] >> test.py::test[join-premap_no_premap--Results] >> test.py::test[insert-append_after_replace-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-append_after_replace-default.txt-Results] >> test.py::test[type_v3-ignore_v3_hint--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-Results] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda--ForceBlocks] >> test.py::test[aggregate-histogram_cdf-default.txt-Results] [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-ForceBlocks] |86.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part9/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[expr-constraints_of--Results] [GOOD] >> test.py::test[expr-empty_iterator--ForceBlocks] >> test.py::test[insert-after_group_by-default.txt-Results] [GOOD] >> test.py::test[insert-part_sortness--ForceBlocks] >> test.py::test[pg-tpcds-q43-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q43-default.txt-Results] >> test.py::test[order_by-order_by_expr_over_sorted_table--Results] [GOOD] >> test.py::test[order_by-order_by_list_of_strings--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] >> test.py::test[join-pullup_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_inner-off-Results] [SKIPPED] >> test.py::test[join-yql-14829_left-off-ForceBlocks] >> test.py::test[select-optional_as_warn-default.txt-Results] [GOOD] >> test.py::test[window-rank/unordered--Results] [GOOD] >> test.py::test[window-win_over_few_partitions--ForceBlocks] >> test.py::test[schema-select_all-row_spec_part-Results] [GOOD] >> test.py::test[schema-select_all_forceinferschema--Results] [SKIPPED] >> test.py::test[schema-select_all_inferschema_range_empty_fail--Results] >> test.py::test[join-mapjoin_opt_vs_2xopt--Results] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct--Results] >> test.py::test[select-refselect-1000-Results] [SKIPPED] >> test.py::test[select-scalar_subquery-default.txt-Results] >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact >> test.py::test[sampling-subquery_filter-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-subquery_filter-default.txt-Results] >> test.py::test[join-flatten_columns1-off-ForceBlocks] [GOOD] >> test.py::test[join-flatten_columns1-off-Results] [SKIPPED] >> test.py::test[join-full_join-off-ForceBlocks] >> test.py::test[insert-two_input_tables--Results] [GOOD] >> test.py::test[insert-udf_empty--Results] >> 
test.py::test[insert-append_after_replace-default.txt-Results] [GOOD] >> test.py::test[insert-insert_null-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q10-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q10-default.txt-Results] >> test.py::test[blocks-date_less_scalar--Results] [GOOD] >> test.py::test[blocks-date_sub_interval_scalar--Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] [GOOD] >> test.py::test[join-lookupjoin_not_selected--Results] [GOOD] >> test.py::test[table_range-merge_non_strict--ForceBlocks] >> test.py::test[join-lookupjoin_with_cache--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] >> test.py::test[order_by-order_by_expr_simple--Results] [GOOD] >> test.py::test[order_by-order_by_tablepath_column--Results] >> test.py::test[in-in_with_list_dict-default.txt-Results] [GOOD] >> test.py::test[insert-keepmeta_view_fail--Results] >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> test.py::test[count-count_by_nulls--Results] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Results] >> test.py::test[pg-tpcds-q43-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q54-default.txt-ForceBlocks] >> test.py::test[pg-select_qstarref1-default.txt-Results] [GOOD] >> test.py::test[pg-select_starref1-default.txt-Results] >> test.py::test[join-lookupjoin_inner_1o--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_1o--Results] >> test.py::test[binding-table_regexp_strict_binding--ForceBlocks] [GOOD] >> test.py::test[binding-table_regexp_strict_binding--Results] >> test.py::test[action-eval_folder_via_file_in_job--Results] [GOOD] >> test.py::test[action-eval_input_output_table--Results] >> test.py::test[sampling-subquery_filter-default.txt-Results] [GOOD] >> test.py::test[sampling-zero_percentage--ForceBlocks] >> test.py::test[window-generic/aggregations_mixed--Results] [GOOD] >> test.py::test[window-rank/plain--ForceBlocks] >> test.py::test[schema-select_all_inferschema_range_empty_fail--Results] [GOOD] >> test.py::test[in-in_with_opt_tuple-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_with_opt_tuple-default.txt-Results] >> test.py::test[schema-select_with_map-partial_read_schema-Results] >> test.py::test[pg-select_from_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-keepmeta_view_fail--Results] [GOOD] >> test.py::test[insert-merge_publish--Results] >> test.py::test[weak_field-weak_field_join_condition--Results] [GOOD] >> test.py::test[weak_field-weak_field_join_where--Results] >> test.py::test[binding-table_regexp_strict_binding--Results] [GOOD] >> test.py::test[blocks-add_int8--ForceBlocks] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-Results] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs_reuse_args_fail--Results] >> test.py::test[window-mixed/aggregations--ForceBlocks] [GOOD] >> test.py::test[window-mixed/aggregations--Results] >> test_row_dispatcher.py::TestPqRowDispatcher::test_group_by_hop_restart_query [SKIPPED] >> test.py::test[aggregate-aggregate_with_lambda--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda--Results] >> test_row_dispatcher.py::TestPqRowDispatcher::test_many_partitions |86.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[pg-select_from_columns-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[simple_columns-simple_columns_join_qualified-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt-Results] >> test.py::test[aggregate-group_by_session_extended--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_extended--Results] >> test.py::test[join-lookupjoin_inner_1o--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_2o-off-ForceBlocks] >> test.py::test[aggregate-list_after_group-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-Results] >> test.py::test[in-in_with_opt_tuple-default.txt-Results] [GOOD] >> test.py::test[insert-fail_read_view_after_modify--ForceBlocks] |86.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> test.py::test[expr-empty_iterator--ForceBlocks] [GOOD] >> test.py::test[expr-empty_iterator--Results] >> test.py::test[order_by-order_by_list_of_strings--Results] [GOOD] >> test.py::test[order_by-order_by_tablerow_column--Results] >> test.py::test[insert-part_sortness--ForceBlocks] [GOOD] >> test.py::test[insert-part_sortness--Results] >> test.py::test[insert-udf_empty--Results] [GOOD] >> test.py::test[insert-yql-14538--Results] >> test.py::test[pg-tpch-q10-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q14-default.txt-ForceBlocks] >> test.py::test[insert-insert_null-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-insert_null-default.txt-Results] >> test.py::test[join-mapjoin_with_empty_struct--Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align2--Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align3-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_no_sorted--Results] >> test.py::test[count-count_no_grouping-default.txt-Results] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-Results] >> test.py::test[blocks-interval_mul--Results] [GOOD] >> test.py::test[blocks-member--Results] >> test.py::test[pg-select_starref1-default.txt-Results] [GOOD] >> test.py::test[pg-table_func-default.txt-Results] >> test.py::test[join-star_join_semionly--Results] [GOOD] >> test.py::test[join-yql-14829_leftonly--Results] >> test.py::test[select-scalar_subquery-default.txt-Results] [GOOD] >> test.py::test[select-select_all-default.txt-Results] >> test.py::test[udf-named_args_for_script_with_posargs_reuse_args_fail--Results] [GOOD] >> test.py::test[view-file_inner_library--Results] >> test.py::test[aggregate-aggregate_with_lambda--Results] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q54-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q54-default.txt-Results] >> test.py::test[aggr_factory-mode-default.txt-Results] [GOOD] >> test.py::test[aggregate-avg_and_sum_float--Results] >> test.py::test[expr-empty_iterator--Results] [GOOD] >> test.py::test[insert-fail_read_view_after_modify--ForceBlocks] [GOOD] >> test.py::test[insert-fail_read_view_after_modify--Results] [GOOD] >> test.py::test[insert-override-from_sorted_calc-ForceBlocks] >> test.py::test[order_by-order_by_tablepath_column--Results] [GOOD] >> test.py::test[order_by-ordered_fill--Results] >> test.py::test[insert-part_sortness--Results] [GOOD] >> test.py::test[insert-replace_ordered_by_key-default.txt-ForceBlocks] >> test.py::test[insert-insert_null-default.txt-Results] [GOOD] >> 
test.py::test[insert-insert_relabeled-default.txt-ForceBlocks] >> test.py::test[join-yql-14829_left-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-14829_left-off-Results] [SKIPPED] >> test.py::test[key_filter-calc_dependent-default.txt-ForceBlocks] >> test.py::test[schema-select_with_map-partial_read_schema-Results] [GOOD] >> test.py::test[schema-skip_complex_type2--Results] >> test.py::test[aggregate-list_after_group-default.txt-Results] [GOOD] >> test.py::test[table_range-merge_non_strict--ForceBlocks] [GOOD] >> test.py::test[table_range-merge_non_strict--Results] >> test.py::test[action-eval_input_output_table--Results] [GOOD] >> test.py::test[action-eval_typeof_output_table--Results] >> test.py::test[aggregate-list_nullable--ForceBlocks] >> test.py::test[sampling-zero_percentage--ForceBlocks] [GOOD] >> test.py::test[sampling-zero_percentage--Results] >> test.py::test[aggregate-group_by_session_extended--Results] [GOOD] >> test.py::test[join-full_join-off-ForceBlocks] [GOOD] >> test.py::test[join-full_join-off-Results] [SKIPPED] >> test.py::test[join-inner_with_select--ForceBlocks] >> test.py::test[window-win_over_few_partitions--ForceBlocks] [GOOD] >> test.py::test[window-win_over_few_partitions--Results] |86.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[expr-empty_iterator--Results] [GOOD] >> test.py::test[aggregate-group_by_mul_gb_ru--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_aggr_expr--Results] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q54-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q58-default.txt-ForceBlocks] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete >> test.py::test[blocks-add_int8--ForceBlocks] [GOOD] >> test.py::test[blocks-add_int8--Results] >> test.py::test[join-lookupjoin_with_cache--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_sharded-default.txt-Results] >> test.py::test[blocks-date_sub_interval_scalar--Results] [GOOD] >> test.py::test[blocks-decimal_comparison--Results] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] >> test.py::test[aggregate-group_by_cube_grouping--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping--Results] >> test.py::test[blocks-date_less_or_equal--ForceBlocks] [GOOD] >> test.py::test[blocks-date_less_or_equal--Results] >> test.py::test[sampling-zero_percentage--Results] [GOOD] >> test.py::test[schema-append_to_desc--ForceBlocks] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] |86.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[aggregate-group_by_session_extended--Results] [GOOD] >> test.py::test[order_by-order_by_tablerow_column--Results] [GOOD] >> test.py::test[window-mixed/aggregations--Results] [GOOD] >> test.py::test[window-win_func_order_by_udf_empty_rank--ForceBlocks] >> test.py::test[table_range-merge_non_strict--Results] [GOOD] >> test.py::test[tpch-q13-default.txt-ForceBlocks] >> 
test.py::test[insert-merge_publish--Results] [GOOD] >> test.py::test[insert-override--Results] >> test.py::test[order_by-order_by_tuple_expr-default.txt-Results] >> test.py::test[select-select_all-default.txt-Results] [GOOD] >> test.py::test[select-select_all_ordered-default.txt-Results] >> test.py::test[action-eval_typeof_output_table--Results] [GOOD] >> test.py::test[action-insert_each_from_folder--Results] >> test.py::test[view-file_inner_library--Results] [GOOD] >> test.py::test[view-view_with_library--Results] >> test.py::test[join-lookupjoin_semi_2o-off-ForceBlocks] [GOOD] >> test.py::test[blocks-add_int8--Results] [GOOD] >> test.py::test[blocks-block_input--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input--Results] [SKIPPED] >> test.py::test[blocks-combine_all_avg--ForceBlocks] >> test.py::test[window-rank/plain--ForceBlocks] [GOOD] >> test.py::test[window-rank/plain--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] >> test.py::test[blocks-member--Results] [GOOD] >> test.py::test[blocks-minmax_strings--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] >> test.py::test[pg-tpch-q02-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q13-default.txt-Results] |86.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[join-lookupjoin_semi_2o-off-ForceBlocks] [GOOD] >> test.py::test[insert-yql-14538--Results] [GOOD] >> test.py::test[insert_monotonic-break_sort_fail--Results] >> test.py::test[pg-tpch-q14-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q14-default.txt-Results] >> test.py::test[window-win_over_few_partitions--Results] [GOOD] >> test.py::test[ypath-multi_range-default.txt-ForceBlocks] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] >> test.py::test[weak_field-weak_field_join_where--Results] [GOOD] >> test.py::test[weak_field-weak_field_real_col-default.txt-Results] >> test.py::test[schema-skip_complex_type2--Results] [GOOD] >> test.py::test[schema-user_schema_missing_column--Results] >> test.py::test[insert-replace_ordered_by_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-replace_ordered_by_key-default.txt-Results] >> test.py::test[order_by-ordered_fill--Results] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-Results] >> test.py::test[join-mergejoin_force_no_sorted--Results] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_per_link-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort--Results] >> test.py::test[insert-override-from_sorted_calc-ForceBlocks] [GOOD] >> test.py::test[insert-override-from_sorted_calc-Results] >> test.py::test[insert-insert_relabeled-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-insert_relabeled-default.txt-Results] >> test.py::test[key_filter-calc_dependent-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-calc_dependent-default.txt-Results] >> test.py::test[aggregate-list_nullable--ForceBlocks] [GOOD] >> test.py::test[aggregate-list_nullable--Results] >> test.py::test[count-count_nullable_sub-default.txt-Results] [GOOD] >> test.py::test[join-inner_with_select--ForceBlocks] [GOOD] >> 
test.py::test[pg-tpcds-q58-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Results] >> test.py::test[join-inner_with_select--Results] >> test.py::test[pg-tpcds-q58-default.txt-Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-break_sort_fail--Results] [GOOD] >> test.py::test[insert_monotonic-not_all_fail--Results] >> test.py::test[pg-tpch-q14-default.txt-Results] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] >> test.py::test[insert-replace_ordered_by_key-default.txt-Results] [GOOD] >> test.py::test[insert-select_subquery--ForceBlocks] >> test.py::test[blocks-decimal_comparison--Results] [GOOD] >> test.py::test[blocks-distinct_pure_all--Results] >> test.py::test[insert-override--Results] [GOOD] >> test.py::test[insert-override-from_sorted_calc-Results] [GOOD] >> test.py::test[insert-override-from_sorted_desc-ForceBlocks] >> test.py::test[insert-insert_relabeled-default.txt-Results] [GOOD] >> test.py::test[insert-keepmeta--ForceBlocks] >> test.py::test[insert-override-from_sorted_calc-Results] >> test.py::test[order_by-order_by_tuple_expr-default.txt-Results] [GOOD] >> test.py::test[order_by-singular-default.txt-Results] [SKIPPED] >> test.py::test[order_by-sort_decimals--Results] >> test.py::test[select-select_all_ordered-default.txt-Results] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-Results] >> test.py::test[key_filter-calc_dependent-default.txt-Results] [GOOD] >> test.py::test[key_filter-dict_contains-default.txt-ForceBlocks] >> test.py::test[schema-append_to_desc--ForceBlocks] [GOOD] >> test.py::test[schema-append_to_desc--Results] >> test.py::test[aggregate-avg_and_sum_float--Results] [GOOD] >> test.py::test[aggregate-compact_distinct--Results] [SKIPPED] >> test.py::test[aggregate-count_distinct_with_filter--Results] >> test.py::test[join-premap_no_premap--Results] [GOOD] >> test.py::test[join-pullup_left_semi-off-Results] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_filter-off-Results] [SKIPPED] >> test.py::test[join-star_join_multi--Results] >> test.py::test[action-insert_each_from_folder--Results] [GOOD] >> test.py::test[action-subquery-default.txt-Results] |86.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-ForceBlocks] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] >> test.py::test[view-view_with_library--Results] [GOOD] >> test.py::test[weak_field-weak_field_num_access--Results] >> test.py::test[pg-tpcds-q58-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q87-default.txt-ForceBlocks] >> test.py::test[join-yql-14829_leftonly--Results] [GOOD] >> test.py::test[join-yql-14829_leftonly-off-Results] [SKIPPED] >> test.py::test[join-yql-4275-off-Results] [SKIPPED] >> test.py::test[aggregate-list_nullable--Results] [GOOD] >> test.py::test[aggregate-percentiles_containers--ForceBlocks] >> 
test.py::test[json-jsondocument/select--Results] >> test.py::test[tpch-q13-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q13-default.txt-Results] >> test.py::test[schema-user_schema_missing_column--Results] [GOOD] >> test.py::test[schema-user_schema_mix2--Results] |86.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[pg-tpch-q14-default.txt-Results] [GOOD] >> test.py::test[insert_monotonic-not_all_fail--Results] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--Results] >> test.py::test[blocks-date_less_or_equal--Results] [GOOD] >> test.py::test[blocks-minmax_strings--ForceBlocks] >> test.py::test[window-rank/plain--Results] [GOOD] >> test.py::test[window-win_inline_spec-default.txt-ForceBlocks] >> test.py::test[join-inner_with_select--Results] [GOOD] >> test.py::test[join-join_key_cmp_udf--ForceBlocks] >> test.py::test[schema-append_to_desc--Results] [GOOD] >> test.py::test[schema-copy-schema-ForceBlocks] >> test.py::test[ypath-multi_range-default.txt-ForceBlocks] [GOOD] >> test.py::test[ypath-multi_range-default.txt-Results] >> test.py::test[join-mapjoin_sharded-default.txt-Results] [GOOD] >> test.py::test[join-mapjoin_with_empty_read-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_choose_primary--Results] >> test.py::test[window-win_func_order_by_udf_empty_rank--ForceBlocks] [GOOD] >> test.py::test[window-win_func_order_by_udf_empty_rank--Results] >> test.py::test[weak_field-weak_field_real_col-default.txt-Results] [GOOD] >> test.py::test[window-current/session--Results] >> test.py::test[aggregate-group_by_cube_grouping--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_bad_delay--ForceBlocks] >> test.py::test[order_by-presort_order_by_table-default.txt-Results] [GOOD] >> test.py::test[pg-aggregate_combine_all--Results] >> test.py::test[key_filter-datetime-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-datetime-default.txt-Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> test.py::test[pg-table_func-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q07-default.txt-Results] >> test.py::test[insert-override-from_sorted_calc-Results] [GOOD] >> test.py::test[insert-override-from_sorted_desc-Results] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters >> test.py::test[ypath-multi_range-default.txt-Results] [GOOD] >> test.py::test[ytflow-select_over_static--ForceBlocks] [SKIPPED] >> test.py::test[ytflow-select_over_static--Results] [SKIPPED] >> test.py::test[json-jsondocument/select--Results] [GOOD] >> test.py::test[key_filter-contains-default.txt-Results] >> test.py::test[insert-select_subquery--ForceBlocks] [GOOD] >> test.py::test[insert-select_subquery--Results] >> test.py::test[blocks-combine_all_avg--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_avg--Results] >> test.py::test[insert-override-from_sorted_desc-ForceBlocks] [GOOD] >> test.py::test[tpch-q13-default.txt-Results] [GOOD] >> test.py::test[insert-override-from_sorted_desc-Results] >> test.py::test[type_v3-append_diff_layout2--ForceBlocks] [SKIPPED] >> test.py::test[schema-user_schema_mix2--Results] [GOOD] >> test.py::test[select-create_tuples-default.txt-Results] >> test.py::test[blocks-date_greater_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_greater_scalar--Results] >> test.py::test[pg-tpcds-q87-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q87-default.txt-Results] >> 
test.py::test[aggregate-group_by_hop_bad_delay--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_bad_delay--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_distinct_compact--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-group_by_hop_distinct_compact--Results] [SKIPPED] >> test.py::test[insert-keepmeta--ForceBlocks] [GOOD] >> test.py::test[insert-keepmeta--Results] >> test.py::test[aggregate-group_by_rollup_aggr_expr--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct--Results] >> test.py::test[window-win_func_order_by_udf_empty_rank--Results] [GOOD] >> test.py::test[window-win_with_cur_row--ForceBlocks] >> test.py::test[select-tablename_with_table_row-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header >> test.py::test[key_filter-dict_contains-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-dict_contains-default.txt-Results] >> test.py::test[weak_field-weak_field_num_access--Results] [GOOD] >> test.py::test[window-current/session_aliases--Results] >> test.py::test[aggregate-count_distinct_with_filter--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--Results] |86.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[ytflow-select_over_static--Results] [SKIPPED] >> test.py::test[action-subquery-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-booland-default.txt-Results] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-Results] |86.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[type_v3-append_diff_layout2--ForceBlocks] [SKIPPED] >> test.py::test[blocks-distinct_pure_all--Results] [GOOD] >> test.py::test[blocks-distinct_pure_keys--Results] >> test.py::test[join-mergejoin_narrows_output_sort--Results] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order-off-Results] [SKIPPED] >> test.py::test[join-nested_semi_join--Results] >> test.py::test[insert-override-from_sorted_desc-Results] [GOOD] >> test.py::test[insert-select_subquery--Results] [GOOD] >> test.py::test[insert-select_with_sort_limit-default.txt-ForceBlocks] >> test.py::test[order_by-sort_decimals--Results] [GOOD] >> test.py::test[order_by-sort_with_take--Results] |86.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[aggregate-group_by_hop_distinct_compact--Results] [SKIPPED] >> test.py::test[schema-copy-schema-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q87-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q05-default.txt-ForceBlocks] >> test.py::test[schema-copy-schema-Results] >> test.py::test[insert-keepmeta--Results] [GOOD] >> test.py::test[insert-literals_to_string-default.txt-ForceBlocks] >> test.py::test[join-join_key_cmp_udf--ForceBlocks] [GOOD] >> test.py::test[join-join_key_cmp_udf--Results] >> test.py::test[pg-tpcds-q07-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q14-default.txt-Results] >> test.py::test[key_filter-dict_contains-default.txt-Results] [GOOD] >> test.py::test[key_filter-string_with-default.txt-ForceBlocks] |86.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> 
test.py::test[insert-override-from_sorted_desc-Results] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] >> test.py::test[insert-override-from_sorted_desc-Results] [GOOD] >> test.py::test[insert-replace_ordered_by_key-default.txt-Results] >> test.py::test[blocks-minmax_strings--ForceBlocks] [GOOD] >> test.py::test[blocks-minmax_strings--Results] >> test.py::test[window-win_inline_spec-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_inline_spec-default.txt-Results] >> test.py::test[pg-aggregate_combine_all--Results] [GOOD] >> test.py::test[pg-pg_types_orderby--Results] [SKIPPED] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] >> test.py::test[schema-copy-schema-Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_unmatched--Results] [SKIPPED] >> test.py::test[join-mergejoin_small_primary-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted-off-Results] [SKIPPED] >> test.py::test[join-nopushdown_filter_with_depends_on--Results] [SKIPPED] >> test.py::test[join-nopushdown_filter_with_depends_on-off-Results] [SKIPPED] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_star1--Results] >> test.py::test[blocks-combine_all_avg--Results] [GOOD] >> test.py::test[blocks-combine_hashed_count_filter--ForceBlocks] >> test.py::test[key_filter-contains-default.txt-Results] [GOOD] >> test.py::test[key_filter-is_null_or_data--Results] >> test.py::test[aggregate-percentiles_containers--ForceBlocks] [GOOD] >> test.py::test[aggregate-percentiles_containers--Results] |86.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[schema-copy-schema-Results] [GOOD] |86.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[join-nopushdown_filter_with_depends_on-off-Results] [SKIPPED] >> test.py::test[select-create_tuples-default.txt-Results] [GOOD] >> test.py::test[select-discard-default.txt-Results] >> test.py::test[pg-tpch-q13-default.txt-Results] [GOOD] >> test.py::test[pg_duplicated-duplicated_rowspec--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] >> test.py::test[join-join_key_cmp_udf--Results] [GOOD] >> test.py::test[join-left_all--ForceBlocks] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] >> test.py::test[window-win_inline_spec-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_order_by_expr--ForceBlocks] >> test.py::test[order_by-sort_with_take--Results] [GOOD] >> test.py::test[order_by-sort_with_take_limit--Results] >> test.py::test[window-current/session--Results] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt-Results] >> test.py::test[insert-literals_to_string-default.txt-ForceBlocks] 
[GOOD] >> test.py::test[insert-literals_to_string-default.txt-Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] >> test.py::test[insert-select_with_sort_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-select_with_sort_limit-default.txt-Results] >> test.py::test[insert-replace_ordered_by_key-default.txt-Results] [GOOD] >> test.py::test[insert_monotonic-break_unique_fail--Results] [SKIPPED] >> test.py::test[join-bush_dis_in_in_in--Results] >> test.py::test[window-win_with_cur_row--ForceBlocks] [GOOD] >> test.py::test[window-win_with_cur_row--Results] |86.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[window-win_inline_spec-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] >> test.py::test[aggregate-group_compact_sorted_distinct--Results] [GOOD] >> test.py::test[aggregate-percentiles_containers--Results] >> test.py::test[blocks-date_greater_scalar--Results] [GOOD] >> test.py::test[blocks-pg_sort--ForceBlocks] >> test.py::test[key_filter-string_with-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-string_with-default.txt-Results] >> test.py::test[insert-literals_to_string-default.txt-Results] [GOOD] >> test.py::test[key_filter-datetime-default.txt-Results] [GOOD] >> test.py::test[lineage-list_literal2-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-list_literal2-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_session-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_session-default.txt-Results] [SKIPPED] >> test.py::test[insert-select_with_sort_limit-default.txt-Results] [GOOD] >> test.py::test[insert-override--ForceBlocks] >> test.py::test[blocks-minmax_strings--Results] [GOOD] >> test.py::test[blocks-minmax_tuple--Results] >> test.py::test[pg_duplicated-duplicated_rowspec--Results] [GOOD] >> test.py::test[pragma-file-default.txt-Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] >> test.py::test[pg-tpch-q05-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q05-default.txt-Results] >> test.py::test[join-nested_semi_join--Results] [GOOD] >> test.py::test[join-premap_common_multiparents-off-Results] [SKIPPED] >> test.py::test[join-premap_context_dep--Results] >> test.py::test[aggr_factory-booland-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-histogram-default.txt-Results] >> test.py::test[aggregate-group_by_gs_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_list_key--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only--Results] [SKIPPED] >> test.py::test[aggregate-group_by_tz_date--Results] >> test.py::test[key_filter-is_null_or_data--Results] [GOOD] >> test.py::test[key_filter-part_key_over_dynamic--Results] >> test.py::test[aggregate-percentiles_containers--Results] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] [GOOD] |86.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[lineage-window_session-default.txt-Results] [SKIPPED] |86.8%| [TM] {default-linux-x86_64, pic, 
relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[insert-select_with_sort_limit-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_hashed_count_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_count_filter--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] >> test.py::test[blocks-minmax_strings--Results] [GOOD] >> test.py::test[blocks-nested_optionals--ForceBlocks] >> test.py::test[key_filter-string_with-default.txt-Results] [GOOD] >> test.py::test[window-win_with_cur_row--Results] [GOOD] >> test.py::test[ypath-empty_range--ForceBlocks] [SKIPPED] >> test.py::test[join-left_all--ForceBlocks] [GOOD] >> test.py::test[join-left_all--Results] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-select_yql_type--Results] >> test.py::test[join-anyjoin_merge_nodup--Results] [GOOD] >> test.py::test[join-bush_dis_in_in-off-Results] [SKIPPED] >> test.py::test[join-cbo_7tables_only_common_join--Results] [SKIPPED] >> test.py::test[join-equi_join_two_mult_keys--Results] >> test.py::test[ypath-empty_range--Results] [SKIPPED] >> test.py::test[window-current/session_aliases--Results] [GOOD] >> test.py::test[window-full/aggregations_compact--Results] >> test.py::test[order_by-sort_with_take_limit--Results] [GOOD] >> test.py::test[pg-all_data--Results] >> test.py::test[pg-tpcds-q14-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q15-default.txt-Results] >> test_row_dispatcher.py::TestPqRowDispatcher::test_many_partitions [GOOD] |86.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[ypath-empty_range--Results] [SKIPPED] |86.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[aggregate-percentiles_containers--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_order_by_expr--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_order_by_expr--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] >> test.py::test[distinct-distinct_star1--Results] [GOOD] >> test.py::test[dq-dq_replicate_ok-default.txt-Results] [SKIPPED] >> test.py::test[dq-mem_limit--Results] [SKIPPED] >> test.py::test[dq-precompute_parallel_mix--Results] [SKIPPED] >> test.py::test[expr-len--Results] |86.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[key_filter-string_with-default.txt-Results] [GOOD] >> test.py::test[join-star_join_multi--Results] [GOOD] >> test.py::test[join-star_join_multi-off-Results] [SKIPPED] >> test.py::test[join-two_aggrs-default.txt-Results] >> test.py::test[blocks-combine_hashed_count_filter--Results] [GOOD] >> test.py::test[blocks-date_less--ForceBlocks] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> test.py::test[pragma-file-default.txt-Results] [GOOD] >> test.py::test[produce-discard_reduce_lambda--Results] [SKIPPED] >> test.py::test[produce-native_desc_reduce_with_presort--Results] [SKIPPED] >> 
test.py::test[produce-process_streaming-default.txt-Results] >> test.py::test[pg-tpch-q05-default.txt-Results] [GOOD] >> test.py::test[join-left_all--Results] [GOOD] >> test.py::test[blocks-pg_sort--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_sort--Results] >> test.py::test[join-lookupjoin_not_selected-off-ForceBlocks] >> test.py::test[window-distinct_over_window_struct-default.txt-Results] [GOOD] >> test.py::test[window-empty/aggregations--Results] >> test.py::test[blocks-distinct_pure_keys--Results] [GOOD] >> test.py::test[blocks-group_by_complex_key--Results] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] >> test.py::test[insert-override--ForceBlocks] [GOOD] >> test.py::test[insert-override--Results] >> test.py::test[aggregate-group_by_expr_order_by_expr--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-ForceBlocks] |86.9%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part8/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[key_filter-part_key_over_dynamic--Results] [GOOD] >> test.py::test[key_filter-ranges--Results] >> test.py::test[blocks-nested_optionals--ForceBlocks] [GOOD] >> test.py::test[blocks-nested_optionals--Results] >> test.py::test[pg-tpcds-q15-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q33-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |86.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[pg-tpch-q05-default.txt-Results] [GOOD] >> test.py::test[blocks-pg_sort--Results] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] >> test.py::test[aggregate-group_by_tz_date--Results] [GOOD] >> test.py::test[aggregate-group_by_with_where-default.txt-Results] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] >> test.py::test[insert-override--Results] [GOOD] >> test.py::test[insert-select_after_replace-default.txt-ForceBlocks] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates >> test.py::test[pg-select_yql_type--Results] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [GOOD] |86.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[blocks-pg_sort--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] >> 
test.py::test[expr-len--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_opt_dict--Results] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] >> test.py::test[blocks-nested_optionals--Results] [GOOD] >> test.py::test[blocks-tuple_type--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] >> test.py::test[join-equi_join_two_mult_keys--Results] [GOOD] >> test.py::test[join-grace_join1-map-Results] [SKIPPED] >> test.py::test[join-join_comp_map_table-off-Results] [SKIPPED] >> test.py::test[join-join_table_conflict_fail--Results] >> test.py::test[aggr_factory-histogram-default.txt-Results] [GOOD] >> test.py::test[aggregate-GroupByTwoFields--Results] >> test.py::test[join-premap_context_dep--Results] [GOOD] >> test.py::test[join-prune_keys--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[join-lookupjoin_not_selected-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_not_selected-off-Results] [SKIPPED] >> test.py::test[produce-process_streaming-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_list-default.txt-Results] >> test.py::test[pg-all_data--Results] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] >> test.py::test[pg-tpcds-q33-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q49-default.txt-Results] >> test.py::test[key_filter-ranges--Results] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-Results] >> test.py::test[join-join_table_conflict_fail--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access-off-Results] [SKIPPED] >> test.py::test[join-join_without_correlation_names-off-Results] [SKIPPED] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] >> test.py::test[aggregate-percentiles_containers--Results] [GOOD] >> test.py::test[aggregate-percentiles_grouped--Results] >> test.py::test[join-left_all-off-Results] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_nested_left--Results] |86.9%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part8/test-results/pytest/{meta.json ... 
results_accumulator.log} |86.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[join-lookupjoin_not_selected-off-Results] [SKIPPED] >> test.py::test[pg-tpcds-q02-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-Results] |86.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[pg-all_data--Results] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] >> test.py::test[blocks-group_by_complex_key--Results] [GOOD] >> test.py::test[blocks-if--Results] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] >> test.py::test[insert-select_after_replace-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-select_after_replace-default.txt-Results] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] >> test.py::test[blocks-tuple_type--ForceBlocks] [GOOD] >> test.py::test[blocks-tuple_type--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_with_view--Results] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] >> test.py::test[aggregate-group_by_with_where-default.txt-Results] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-Results] >> test.py::test[blocks-minmax_tuple--Results] [GOOD] >> test.py::test[blocks-not--Results] >> test.py::test[join-two_aggrs-default.txt-Results] [GOOD] >> test.py::test[join-yql-12022--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:677: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=f'{self.folder_id}_other') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:683: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token='usr_alkoberkanavt_5', folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:689: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=False, iam_token=self.iam_token, 
folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:695: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:703: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id='FOLDER_alkonavt') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:677: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=f'{self.folder_id}_other') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:683: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token='usr_alkoberkanavt_5', folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:689: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=False, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:695: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:703: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id='FOLDER_alkonavt') ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_many_partitions [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=794812) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[join-bush_dis_in_in_in--Results] [GOOD] >> test.py::test[join-bush_in_in--Results] >> test.py::test[select-discard-default.txt-Results] [GOOD] >> test.py::test[select-dot_in_alias-default.txt-Results] >> test.py::test[insert-select_after_replace-default.txt-Results] [GOOD] >> test.py::test[window-full/aggregations_compact--Results] [GOOD] >> test.py::test[window-full/session_aliases_compact--Results] >> test.py::test[blocks-tuple_type--Results] [GOOD] >> test.py::test[column_group-hint_anon_groups-single-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-single-Results] [SKIPPED] >> test.py::test[column_group-hint_append--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_append--Results] [SKIPPED] >> test.py::test[column_order-select_where-default.txt-ForceBlocks] >> test.py::test[window-empty/aggregations--Results] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Results] >> test.py::test[produce-reduce_all_list-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Results] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] >> test.py::test[aggregate-GroupByTwoFields--Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-Results] >> test.py::test[flatten_by-flatten_by_opt_dict--Results] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--Results] >> test.py::test[blocks-date_less--ForceBlocks] [GOOD] >> test.py::test[blocks-date_less--Results] |86.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[insert-select_after_replace-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q49-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q77-default.txt-Results] >> test.py::test[key_filter-string_with_ff-default.txt-Results] [GOOD] >> test.py::test[key_filter-tzdate--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] >> test.py::test[join-prune_keys--Results] [GOOD] >> test.py::test[join-pullup_context_dep-off-Results] [SKIPPED] >> test.py::test[join-pullup_cross--Results] >> test.py::test[blocks-not--Results] [GOOD] >> test.py::test[blocks-pg_to_numbers--Results] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] >> test.py::test[blocks-if--Results] [GOOD] >> test.py::test[blocks-json_document_type--Results] |87.0%| [TA] 
$(B)/ydb/library/yql/tests/sql/dq_file/part2/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--ForceBlocks] >> test.py::test[select-dot_in_alias-default.txt-Results] [GOOD] >> test.py::test[select-from_in_front-default.txt-Results] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q43-default.txt-Results] >> test.py::test[join-yql-12022--Results] [GOOD] >> test.py::test[join-yql-14847--Results] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling--Results] >> test.py::test[table_range-concat_with_view--Results] [GOOD] >> test.py::test[table_range-each_with_non_existing_all_fail--Results] >> test.py::test[column_order-select_where-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_where-default.txt-Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] >> test.py::test[pg-tpcds-q77-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q98-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[join-left_join_right_pushdown_nested_left--Results] [GOOD] >> test.py::test[join-left_semi_with_other--Results] >> test.py::test[flatten_by-flatten_member_is_struct--Results] [GOOD] >> test.py::test[hor_join-group_sampling--Results] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props--Results] [SKIPPED] >> test.py::test[in-basic_in-default.txt-Results] >> test.py::test[column_order-select_where-default.txt-Results] [GOOD] >> test.py::test[count-count_all_grouped--ForceBlocks] >> test.py::test[table_range-each_with_non_existing_all_fail--Results] [GOOD] >> test.py::test[table_range-range_over_filter--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] >> test.py::test[blocks-date_less--Results] [GOOD] >> test.py::test[blocks-div_uint64_opt2--ForceBlocks] |87.0%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part2/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[aggregate-percentiles_grouped--Results] [GOOD] >> test.py::test[aggregate-percentiles_grouped_expr--Results] >> test.py::test[join-bush_in_in--Results] [GOOD] >> test.py::test[join-convert_key--Results] >> test.py::test[blocks-pg_to_numbers--Results] [GOOD] >> test.py::test[blocks-pg_top_sort--Results] >> test.py::test[pg-tpcds-q43-default.txt-Results] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] >> test.py::test[select-from_in_front-default.txt-Results] [GOOD] >> test.py::test[select-qualified_all_and_group_by-default.txt-Results] >> test.py::test[aggregate-percentile_interval-default.txt-Results] [GOOD] >> test.py::test[binding-table_range_binding-default.txt-Results] >> test.py::test[blocks-json_document_type--Results] [GOOD] >> test.py::test[blocks-pg_tofrom--Results] >> test.py::test[pg-tpcds-q98-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q04-default.txt-Results] >> test.py::test[window-full/session_aliases_compact--Results] [GOOD] >> test.py::test[window-generic/aggregations_before_current--Results] >> test.py::test[key_filter-tzdate--Results] [GOOD] >> test.py::test[lambda-lambda_udf--Results] |87.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[pg-tpcds-q43-default.txt-Results] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] >> test.py::test[join-pullup_cross--Results] [GOOD] >> test.py::test[join-pullup_exclusion-off-Results] >> test.py::test[window-empty/aggregations_leadlag--Results] [GOOD] >> test.py::test[join-pullup_exclusion-off-Results] [SKIPPED] >> test.py::test[join-pullup_left--Results] >> test.py::test[window-full/session_compact--Results] >> test.py::test[join-yql-14847--Results] [GOOD] >> test.py::test[table_range-range_over_filter--Results] [GOOD] >> test.py::test[tpch-q14-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[json-json_query/example--Results] >> test.py::test[in-basic_in-default.txt-Results] [GOOD] >> test.py::test[in-in_sorted--Results] >> test.py::test[count-count_all_grouped--ForceBlocks] [GOOD] >> test.py::test[count-count_all_grouped--Results] >> test.py::test[produce-reduce_multi_in_sampling--Results] [GOOD] >> test.py::test[produce-reduce_with_flat_python_stream--Results] [SKIPPED] >> test.py::test[ql_filter-integer_many_right--Results] >> test.py::test[blocks-div_uint64_opt2--ForceBlocks] [GOOD] >> test.py::test[blocks-div_uint64_opt2--Results] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: 
DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[blocks-div_uint64_opt2--Results] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] >> test.py::test[count-count_all_grouped--Results] [GOOD] >> test.py::test[distinct-distinct_list_after_group-default.txt-ForceBlocks] >> test.py::test[join-convert_key--Results] [GOOD] >> test.py::test[join-equi_join_three_asterisk_eval-off-Results] [SKIPPED] >> test.py::test[blocks-pg_top_sort--Results] [GOOD] >> test.py::test[blocks-sort_two_asc--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] [GOOD] >> test.py::test[blocks-pg_tofrom--Results] [GOOD] >> test.py::test[blocks-sort_two_mix--Results] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] >> test.py::test[binding-table_range_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-bitcast_block--Results] >> test.py::test[lambda-lambda_udf--Results] [GOOD] >> test.py::test[like-like_clause_escape-default.txt-Results] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Results] |87.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[blocks-div_uint64_opt2--Results] [GOOD] |87.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[join-equi_join_three_asterisk_eval-off-Results] [SKIPPED] >> test_auditlog.py::test_single_dml_query_logged[replace] >> test.py::test[select-qualified_all_and_group_by-default.txt-Results] [GOOD] >> test.py::test[select-result_size_limit_with_fill--Results] [SKIPPED] >> test.py::test[select-simple_struct_field_access--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds >> test.py::test[aggregate-aggregate_distinct_list-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] >> test.py::test[ql_filter-integer_many_right--Results] [GOOD] >> test.py::test[sampling-map-keyfilter-Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] >> test.py::test[join-left_semi_with_other--Results] [GOOD] >> test.py::test[join-lookupjoin_inner--Results] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] >> test.py::test[tpch-q14-default.txt-Results] [GOOD] >> test.py::test[tpch-q2-default.txt-Results] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] >> test.py::test[json-json_query/example--Results] [GOOD] >> test.py::test[key_filter-calc_dependent-default.txt-Results] >> test.py::test[in-in_sorted--Results] [GOOD] >> test.py::test[in-in_tuple_table-default.txt-Results] >> test.py::test[join-pullup_left--Results] [GOOD] >> test.py::test[join-pullup_left-off-Results] [SKIPPED] >> test.py::test[join-pullup_renaming--Results] >> test_auditlog.py::test_single_dml_query_logged[select] >> test_auditlog.py::test_single_dml_query_logged[insert] >> test.py::test[blocks-bitcast_block--Results] [GOOD] >> test.py::test[blocks-block_input_sys_columns--Results] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-Results] [SKIPPED] >> test.py::test[blocks-combine_all_avg--Results] >> test.py::test[select-simple_struct_field_access--Results] [GOOD] >> test.py::test[select-trivial_order_by-default.txt-Results] >> test.py::test[blocks-sort_two_asc--Results] [GOOD] >> test.py::test[blocks-string_len_and_cmp--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[like-like_clause_escape-default.txt-Results] [GOOD] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Results] >> test.py::test[blocks-sort_two_mix--Results] [GOOD] >> test.py::test[blocks-top_sort_two_asc--Results] >> test.py::test[distinct-distinct_list_after_group-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_list_after_group-default.txt-Results] >> test.py::test[aggregate-percentiles_grouped_expr--Results] [GOOD] >> test.py::test[bigdate-table_explicit_cast-default.txt-Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> test.py::test[pg-tpch-q04-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q15-default.txt-Results] >> test.py::test[window-full/session_compact--Results] [GOOD] >> test.py::test[window-win_by_all_aggregate--Results] >> 
test.py::test[aggregate-group_compact_sorted_with_diff_order--Results] [GOOD] >> test.py::test[bigdate-tz_table_fill--ForceBlocks] >> test.py::test[sampling-map-keyfilter-Results] [GOOD] >> test.py::test[sampling-read--Results] >> test.py::test[window-generic/aggregations_before_current--Results] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] >> test.py::test[key_filter-calc_dependent-default.txt-Results] [GOOD] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-Results] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--Results] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda--Results] >> test.py::test[in-in_tuple_table-default.txt-Results] [GOOD] >> test.py::test[in-in_with_opt_tuple-default.txt-Results] >> test.py::test[distinct-distinct_list_after_group-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] >> test.py::test[join-lookupjoin_inner--Results] [GOOD] >> test.py::test[join-mapjoin_dup_key--Results] >> YdbSdkSessionsPool::StressTestSync/0 >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] >> test.py::test[select-trivial_order_by-default.txt-Results] [GOOD] >> test.py::test[select-type_assert-default.txt-Results] |87.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[distinct-distinct_list_after_group-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] |87.1%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part6/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[blocks-top_sort_two_asc--Results] [GOOD] >> test.py::test[blocks-top_sort_two_mix--Results] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Results] [GOOD] >> test.py::test[lineage-flatten_list_nested_lambda--Results] [SKIPPED] >> test.py::test[lineage-flatten_where-default.txt-Results] [SKIPPED] >> test.py::test[lineage-process-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_field_rename-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_group_by_key-default.txt-Results] [SKIPPED] >> YdbSdkSessionsPool::StressTestSync/1 >> test.py::test[bigdate-tz_table_fill--ForceBlocks] [GOOD] >> test.py::test[bigdate-tz_table_fill--Results] >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] >> test.py::test[join-pullup_renaming--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--Results] >> test.py::test[tpch-q2-default.txt-Results] [GOOD] >> test.py::test[sampling-read--Results] [GOOD] >> test.py::test[sampling-reduce-with_premap-Results] [SKIPPED] >> test.py::test[sampling-subquery_default-default.txt-Results] >> test.py::test[blocks-string_len_and_cmp--Results] [GOOD] >> test.py::test[case-case_multi_val-default.txt-Results] >> test.py::test[in-in_with_opt_tuple-default.txt-Results] [GOOD] >> test.py::test[insert-append-with_view-Results] [SKIPPED] >> test.py::test[insert-append_view_fail--Results] |87.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[lineage-select_group_by_key-default.txt-Results] [SKIPPED] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-Results] [GOOD] >> test.py::test[key_filter-is_null_multi_key--Results] >> test.py::test[aggregate-aggregate_with_lambda--Results] [GOOD] >> test.py::test[aggregate-compare_by_tuple--Results] >> test.py::test[bigdate-tz_table_fill--Results] [GOOD] >> test.py::test[select-type_assert-default.txt-Results] [GOOD] >> test.py::test[select-where_cast-default.txt-Results] |87.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[tpch-q2-default.txt-Results] [GOOD] >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] >> test.py::test[pg-tpch-q15-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q17-default.txt-Results] >> test.py::test[join-mapjoin_dup_key--Results] [GOOD] >> test.py::test[join-mapjoin_dup_key-off-Results] [SKIPPED] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] >> test.py::test[insert-append_view_fail--Results] [GOOD] >> test.py::test[insert-double_append_to_anonymous--Results] >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] |87.1%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part6/test-results/pytest/{meta.json ... 
results_accumulator.log} |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] |87.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[bigdate-tz_table_fill--Results] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] |87.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[join-mapjoin_dup_key-off-Results] [SKIPPED] >> test.py::test[sampling-subquery_default-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_expr-default.txt-Results] >> test.py::test[bigdate-table_explicit_cast-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-on-Results] [SKIPPED] >> test.py::test[binding-anon_table_binding-default.txt-Results] >> test.py::test[blocks-top_sort_two_mix--Results] [GOOD] >> test.py::test[coalesce-coalesce_few_opt--Results] |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] >> test.py::test[blocks-combine_all_avg--Results] [GOOD] >> test.py::test[case-case_multi_val-default.txt-Results] [GOOD] >> test.py::test[case-case_val_then_else-default.txt-Results] >> test.py::test[window-leading/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-win_func_rank_by_part--Results] >> YdbSdkSessionsPool::PeriodicTask/0 >> test.py::test[key_filter-is_null_multi_key--Results] [GOOD] >> test.py::test[key_filter-split_input_with_key_filter1--Results] [SKIPPED] >> test.py::test[lambda-lambda_with_tie-default.txt-Results] >> test.py::test[select-where_cast-default.txt-Results] [GOOD] >> YdbSdkSessionsPool::StressTestAsync/0 >> YdbSdkSessionsPool1Session::CustomPlan/0 |87.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[blocks-combine_all_avg--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] |87.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[select-where_cast-default.txt-Results] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted--Results] >> 
test.py::test[insert-double_append_to_anonymous--Results] [GOOD] >> test.py::test[insert-drop_sortness-desc-Results] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] >> test.py::test[binding-anon_table_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_decimal--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [FAIL] >> test.py::test[sampling-subquery_expr-default.txt-Results] [GOOD] >> test.py::test[schema-limit_simple--Results] >> test.py::test[case-case_val_then_else-default.txt-Results] [GOOD] >> test.py::test[case-case_when_then-default.txt-Results] >> YdbSdkSessionsPool::WaitQueue/0 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] >> test.py::test[window-win_by_all_aggregate--Results] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Results] >> test.py::test[lambda-lambda_with_tie-default.txt-Results] [GOOD] >> test.py::test[limit-limit_skip_take-default.txt-Results] >> test.py::test[coalesce-coalesce_few_opt--Results] [GOOD] >> test.py::test[column_group-hint_anon-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-disable-Results] >> test.py::test[column_group-hint_anon_groups-disable-Results] [SKIPPED] >> test.py::test[column_group-hint_dup_def_fail--Results] [SKIPPED] >> test.py::test[column_group-length-perusage-Results] [SKIPPED] >> test.py::test[column_group-min_group-default.txt-Results] [SKIPPED] >> test.py::test[column_order-insert--Results] >> YdbSdkSessionsPool::WaitQueue/1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly >> YdbSdkSessions::MultiThreadSync >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] >> YdbSdkSessions::MultiThreadSync [GOOD] >> YdbSdkSessions::SessionsServerLimit [SKIPPED] >> test.py::test[insert-drop_sortness-desc-Results] [GOOD] >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool >> test.py::test[join-selfjoin_on_sorted--Results] [GOOD] >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit >> test.py::test[window-win_func_rank_by_part--Results] [GOOD] >> test.py::test[window-win_func_spec_with_part--Results] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] Test command 
err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] |87.2%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part0/test-results/pytest/{meta.json ... results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] |87.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[insert-drop_sortness-desc-Results] [GOOD] >> test.py::test[blocks-add_decimal--Results] [GOOD] >> test.py::test[blocks-block_input-aux_columns-Results] [SKIPPED] >> test.py::test[blocks-coalesce_bools--Results] >> test.py::test[schema-limit_simple--Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort2-Results] >> YdbSdkSessions::TestMultipleSessions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |87.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[join-selfjoin_on_sorted--Results] [GOOD] >> YdbSdkSessions::TestMultipleSessions [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError >> test.py::test[case-case_when_then-default.txt-Results] [GOOD] >> test.py::test[column_group-groups-max-Results] [SKIPPED] >> test.py::test[column_group-groups-perusage-Results] [SKIPPED] >> test.py::test[column_group-insert_diff_groups2_fail--Results] [SKIPPED] >> test.py::test[column_order-align_publish--Results] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] >> test.py::test[pg-tpch-q17-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q22-default.txt-Results] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimit [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:543: Enable after accepting a pull request with merging configs >> 
test.py::test[limit-limit_skip_take-default.txt-Results] [GOOD] >> test.py::test[limit-many_top_sorts-default.txt-Results] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterBadSession >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] >> test.py::test[aggregate-compare_by_tuple--Results] [GOOD] >> test.py::test[aggregate-dedup_state_keys--Results] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:195: Test is failing right now >> YdbSdkSessions::TestSessionPool >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> YdbSdkSessions::TestSessionPool [GOOD] |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> YdbSdkSessionsPool::StressTestSync/0 [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Results] [GOOD] >> test.py::test[window-win_func_with_struct_access-default.txt-Results] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> 
test.py::test[schema-select_all-row_spec_diff_sort2-Results] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] |87.3%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part0/test-results/pytest/{meta.json ... results_accumulator.log} |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] >> test.py::test[column_order-insert--Results] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/000aef/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk18/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.replace/audit.txt 2025-05-05T03:12:21.806542Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:12:21.806528Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-05-05T03:12:21.793515Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:583: Enable after accepting a pull request with merging configs |87.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[schema-select_all-row_spec_diff_sort2-Results] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test.py::test[blocks-coalesce_bools--Results] [GOOD] >> test.py::test[blocks-coalesce_complex-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:677: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=f'{self.folder_id}_other') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:683: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', 
raise_on_error=True, force_private=True, iam_token='usr_alkoberkanavt_5', folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:689: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=False, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:695: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:703: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id='FOLDER_alkonavt') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:677: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=f'{self.folder_id}_other') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:683: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token='usr_alkoberkanavt_5', folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:689: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=False, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:695: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:703: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id='FOLDER_alkonavt') ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:243: Test is failing right now |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |87.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[column_order-insert--Results] [GOOD] |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSessionPool [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> 
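
Several of the error blocks above end with "ResourceWarning: unclosed ..." followed by the hint "Enable tracemalloc to get the object allocation traceback". The warnings point at _sqs_api objects created in ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py that are never closed, but without tracemalloc Python cannot show where the leaked object was allocated. The sketch below shows how that hint is usually followed; putting it in a conftest.py is an assumption for illustration, not something the YDB tree is known to do.

    # Hypothetical conftest.py snippet: start tracemalloc early so that any later
    # ResourceWarning carries the allocation traceback instead of the
    # "Enable tracemalloc" hint seen in this log.
    import tracemalloc

    if not tracemalloc.is_tracing():
        tracemalloc.start(25)  # keep up to 25 frames per recorded allocation

The same effect is available without code changes by exporting PYTHONTRACEMALLOC=25 before running the tests; either way the warnings above would then name the exact _create_api_for_user call that produced the unclosed object.
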
test_copy_table.py::TestCopyTable::test_copy_table[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[column_order-align_publish--Results] [GOOD] >> test.py::test[column_order-ordered_plus_native--Results] >> YdbSdkSessionsPool::StressTestSync/1 [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> test.py::test[window-win_func_spec_with_part--Results] [GOOD] |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/0 [GOOD] >> test.py::test[limit-many_top_sorts-default.txt-Results] [GOOD] >> test.py::test[limit-yql-9617_empty_lambda-default.txt-Results] >> test.py::test[aggregate-dedup_state_keys--Results] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt-Results] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/000ad9/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk19/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.select/audit.txt 2025-05-05T03:12:23.564834Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:12:23.564825Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-05-05T03:12:23.551971Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[window-win_func_spec_with_part--Results] [GOOD] Test command err: 127.0.0.1 - - [05/May/2025 03:11:49] "GET /mylib.sql HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 03:11:50] "GET /mylib.sql HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 03:11:51] "GET /nested_library.sql.txt HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 03:11:52] "GET /nested_library.sql.txt HTTP/1.1" 200 - ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/000ad4/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk17/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.insert/audit.txt 2025-05-05T03:12:23.842564Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:12:23.842543Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-05-05T03:12:23.830615Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_copy_table.py::TestCopyTable::test_copy_table[table_all_types-pk_types12-all_types12-index12---] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> 
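
The audit.txt excerpts in the test_auditlog.py blocks above are single-line records: an ISO-8601 timestamp, a colon, and a JSON payload with fields such as status, operation and query_text. Below is a small sketch for inspecting such a file offline; it assumes that line layout holds for every record, which is only what these excerpts suggest, and the sample record is an illustrative paraphrase of the ones shown in the log.

    import json

    def parse_audit_line(line: str) -> dict:
        # "2025-05-05T03:12:23.564834Z: {...}" -> split once on the first ": "
        # (the colons inside the timestamp are never followed by a space).
        _, payload = line.split(": ", 1)
        return json.loads(payload)

    record = parse_audit_line(
        '2025-05-05T03:12:23.564834Z: {"status": "SUCCESS", '
        '"operation": "ExecuteDataQueryRequest", '
        '"query_text": "select id from `/Root/test_auditlog.py/test-table`"}'
    )
    assert record["status"] == "SUCCESS"
    assert record["operation"] == "ExecuteDataQueryRequest"
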
test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/1 [GOOD] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-fifo] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[window-win_func_with_struct_access-default.txt-Results] [GOOD] >> test.py::test[window-win_multiaggr_library--Results] >> test.py::test[column_order-ordered_plus_native--Results] [GOOD] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-Results] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[blocks-coalesce_complex-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_count_filter_opt--Results] >> test.py::test[limit-yql-9617_empty_lambda-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] |87.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[limit-yql-9617_empty_lambda-default.txt-Results] [GOOD] |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] >> test.py::test[pg-tpch-q22-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_expr-default.txt-Results] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-Results] [GOOD] >> test.py::test[column_order-select_plain-default.txt-Results] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] >> test.py::test[window-win_multiaggr_library--Results] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-4.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-5.test] >> test.py::test[blocks-combine_all_count_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_max_filter--Results] |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] |87.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} 
ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[window-win_multiaggr_library--Results] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [FAIL] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_actorsystem.py::TestWithStorageNodeWith16Cpu::test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_semi_join--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test.py::test[sampling-bind_expr-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_join_right-default.txt-Results] [SKIPPED] >> test.py::test[sampling-join_left_sample-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> 
test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] >> test_auditlog.py::test_dynconfig ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_actorsystem.py::TestWithStorageNodeWith16Cpu::test [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> test.py::test[blocks-combine_all_max_filter--Results] [GOOD] >> test.py::test[blocks-combine_all_some--Results] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test.py::test[column_order-select_plain-default.txt-Results] [GOOD] >> test.py::test[count-count_distinct_from_view_concat--Results] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> test.py::test[sampling-join_left_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-yql-14664_deps-default.txt-Results] >> test.py::test[aggregate-group_by_expr_semi_join--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_few_empty--Results] >> YdbSdkSessionsPool::PeriodicTask/0 [GOOD] >> YdbSdkSessionsPool::PeriodicTask/1 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] >> test.py::test[blocks-combine_all_some--Results] [GOOD] >> test.py::test[blocks-combine_hashed_min--Results] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> test.py::test[count-count_distinct_from_view_concat--Results] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] |87.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[count-count_distinct_from_view_concat--Results] [GOOD] >> test_auditlog.py::test_dynconfig [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] [GOOD] >> test.py::test[sampling-yql-14664_deps-default.txt-Results] [GOOD] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_actorsystem.py::TestWithStorageNodeWith17Cpu::test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] |87.7%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part4/test-results/pytest/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test.py::test[blocks-combine_hashed_min--Results] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test.py::test[blocks-combine_hashed_sum_many_keys--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] |87.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[sampling-yql-14664_deps-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test.py::test[aggregate-group_by_gs_few_empty--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt-Results] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_actorsystem.py::TestWithStorageNodeWith17Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test |87.8%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part4/test-results/pytest/{meta.json ... 
results_accumulator.log} |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-5.test] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |87.9%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-fifo] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_expr_key--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_static-default.txt-Results] [SKIPPED] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/000a97/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk15/testing_out_stuff/test_auditlog.py.test_dynconfig/audit.txt 2025-05-05T03:12:46.985364Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] |88.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[aggregate-group_by_hop_static-default.txt-Results] [SKIPPED] |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] >> test_actorsystem.py::TestWithStorageNodeWith18Cpu::test |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-5.test] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] >> test.py::test[blocks-combine_hashed_sum_many_keys--Results] [GOOD] >> test.py::test[blocks-date_greater_scalar--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] >> test_actorsystem.py::TestWithStorageNodeWith18Cpu::test [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead 
logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_actorsystem.py::TestWithStorageNodeWith19Cpu::test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead 
logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] >> YdbSdkSessionsPool::PeriodicTask/1 [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] [GOOD] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_actorsystem.py::TestWithStorageNodeWith19Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::PeriodicTask/1 [GOOD] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |88.2%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-fifo] [GOOD] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/000a7e/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk6/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_bad_dynconfig/audit.txt 2025-05-05T03:12:58.595616Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"ERROR","subject":"root@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] Test 
command err: AAA /home/runner/.ya/build/build_root/177e/000a76/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk1/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_good_dynconfig/audit.txt 2025-05-05T03:12:59.227721Z: {"sanitized_token":"**** (C877DF61)","subject":"__bad__@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test.py::test[blocks-date_greater_scalar--Results] [GOOD] >> test.py::test[blocks-date_group_by--Results] >> test_actorsystem.py::TestWithStorageNodeWith1Cpu::test |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |88.3%| [TA] $(B)/ydb/tests/datashard/copy_table/test-results/py3test/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] |88.3%| [TA] {RESULT} $(B)/ydb/tests/datashard/copy_table/test-results/py3test/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] [GOOD] |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/rename/py3test |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] >> test_actorsystem.py::TestWithStorageNodeWith1Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] >> test_actorsystem.py::TestWithStorageNodeWith20Cpu::test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] >> test_auditlog.py::test_dml_begin_commit_logged |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> test.py::test[blocks-date_group_by--Results] [GOOD] >> test.py::test[blocks-date_sub_scalar--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text 
'{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/threading.py:589: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/threading.py:287: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v2] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith20Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: 
DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/000a3c/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk12/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_sid_is_expected/audit.txt |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith20Cpu::test [GOOD] |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v2] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/000a33/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk13/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_sid_is_unexpected/audit.txt 2025-05-05T03:13:09.263878Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:13:09.263866Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-05-05T03:13:09.253552Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T03:13:09.389053Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:13:09.389043Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-05-05T03:13:09.368665Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T03:13:09.511561Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:13:09.511552Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-05-05T03:13:09.493528Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T03:13:09.634435Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:13:09.634422Z","sanitized_token":"othe****ltin 
(27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-05-05T03:13:09.616335Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T03:13:09.748961Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:13:09.748950Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-05-05T03:13:09.738752Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T03:13:09.864414Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:13:09.864403Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-05-05T03:13:09.853462Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] [GOOD] 
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test.py::test[blocks-date_sub_scalar--Results] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] |88.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[blocks-date_sub_scalar--Results] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] >> test_recovery.py::TestRecovery::test_program_state_recovery >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_missing_fields |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/000a1a/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk11/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_anonymous/audit.txt >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-analytics] |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-analytics] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is 
deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(
>> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD]
|88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/177e/000a0b/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk10/testing_out_stuff/test_auditlog.py.test_dml_begin_commit_logged/audit.txt
2025-05-05T03:13:18.038982Z: {"tx_id":"01jtf6y16p8ymk17sqnr7tsv3s","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:13:18.038971Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-05-05T03:13:18.038729Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"BeginTransactionRequest","component":"grpc-proxy"}
2025-05-05T03:13:18.062873Z: {"tx_id":"01jtf6y16p8ymk17sqnr7tsv3s","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:13:18.062860Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","commit_tx":"0","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-05-05T03:13:18.041931Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"}
2025-05-05T03:13:18.067806Z: {"tx_id":"01jtf6y16p8ymk17sqnr7tsv3s","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:13:18.067796Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-05-05T03:13:18.066324Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"CommitTransactionRequest","component":"grpc-proxy"}
|88.7%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part10/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-streaming] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-analytics] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-streaming] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> YdbSdkSessionsPool::StressTestAsync/0 [GOOD] >> YdbSdkSessionsPool::StressTestAsync/1 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-analytics] |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> 
test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_with_mr >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] |88.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part10/test-results/pytest/{meta.json ... results_accumulator.log} |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=872915) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_recovery.py::TestRecovery::test_recovery |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/threading.py:287: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/concurrent/futures/_base.py:328: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming[v1] |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD] |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] >> test_metrics_cleanup.py::TestCleanup::test_cleanup[v1] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column [GOOD] >> 
test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_recovery.py::TestRecovery::test_program_state_recovery [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_disposition.py::TestContinueMode::test_disposition_time_ago[v1-mvp_external_ydb_endpoint0] |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] 
[GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_mem_alloc.py::TestMemAlloc::test_hop_alloc[v1] >> test_public_api.py::TestExplain::test_explain_data_query >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] >> test_mem_alloc.py::TestMemAlloc::test_hop_alloc[v1] [SKIPPED] >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario [GOOD] >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v1] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, 
reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-analytics] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 >> test_mem_alloc.py::TestMemAlloc::test_join_alloc[v1] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 >> test_mem_alloc.py::TestMemAlloc::test_join_alloc[v1] [SKIPPED] >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] >> test_2_selects_limit.py::TestSelectLimit::test_select_same[v1] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is 
deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate [GOOD] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming[v1] [GOOD] >> test_recovery.py::TestRecovery::test_program_state_recovery_error_if_no_states >> test_2_selects_limit.py::TestSelectLimit::test_select_same[v1] [SKIPPED] >> test_public_api.py::TestExplain::test_explain_data_query [GOOD] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_missing_fields [GOOD] >> test_2_selects_limit.py::TestSelectLimit::test_select_sequence[v1] >> test_2_selects_limit.py::TestSelectLimit::test_select_sequence[v1] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method 
is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_with_mr [GOOD] |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v2] >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-analytics] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_mem_alloc.py::TestMemAlloc::test_join_alloc[v1] [SKIPPED] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=905395) is multi-threaded, use of fork() may lead to deadlocks in the child. 
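Note on the warning just above ("This process is multi-threaded, use of fork() may lead to deadlocks in the child"): recent CPython (3.12+) emits this DeprecationWarning when a multi-threaded process creates children via fork(). How the ya test harness configures process creation is not visible in this log; the sketch below only illustrates the generic workaround of requesting the "spawn" start method, with illustrative function names.

    # Illustrative only: start children from a fresh interpreter ("spawn")
    # instead of fork()-ing a parent that already has running threads.
    import multiprocessing as mp

    def square(n):
        return n * n

    if __name__ == "__main__":
        ctx = mp.get_context("spawn")  # avoids the fork()-in-threads deprecation path
        with ctx.Pool(processes=2) as pool:
            print(pool.map(square, range(4)))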
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_3_selects.py::TestSelects::test_3_selects[v1-mvp_external_ydb_endpoint0] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_revert_basis >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] >> test_insert.py::TestInsertOperations::test_insert_revert_basis [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-streaming] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] >> test_select_1.py::TestSelect1::test_select_z_x_y[v1] |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-analytics] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_date_time_format[v1] >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_metadatafields >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns [GOOD] >> 
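The popen_fork.py DeprecationWarning repeated in these test logs is CPython warning that starting workers via fork() from an already multi-threaded parent can deadlock the child. Whether the test harness should change anything is not decided here; the usual remedy, shown only as a sketch, is to select the "spawn" start method before any process or pool is created:

    import multiprocessing as mp

    def square(n: int) -> int:
        return n * n

    if __name__ == "__main__":
        # "spawn" launches a fresh interpreter instead of fork()ing the
        # (possibly multi-threaded) parent, which is what the warning is about.
        mp.set_start_method("spawn")
        with mp.Pool(processes=2) as pool:
            print(pool.map(square, range(4)))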
test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot [GOOD] >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_use_unsupported_predicate >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_non_optional_field >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' 
method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v2] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] |89.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-analytics] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=897181) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:13:37] send response localhost:18021/?database=local ::1 - - [05/May/2025 03:13:37] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_select_limit_db_id.py::TestSelectLimitWithDbId::test_select_same_with_id[v1-mvp_external_ydb_endpoint0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_select_limit_db_id.py::TestSelectLimitWithDbId::test_select_same_with_id[v1-mvp_external_ydb_endpoint0] [SKIPPED] |89.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] >> test_select_1.py::TestSelect1::test_compile_error[v1] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-aborted] |89.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' 
instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |89.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> test_select_1.py::TestSelect1::test_select_1[v1] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case4[v1] |89.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_3_selects.py::TestSelects::test_3_selects[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_type_as_column[v1] |89.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-analytics] [GOOD] >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-streaming] [GOOD] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> 
test_yq_streaming.py::TestYqStreaming::test_match_recognize_sink[v1] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir >> test_row_dispatcher.py::TestPqRowDispatcher::test_2_connection >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/000967/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk0/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_bad_dynconfig/audit.txt 2025-05-05T03:13:45.312537Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (C877DF61)","remote_address":"127.0.0.1","status":"ERROR","subject":"__bad__@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] >> test_select_1.py::TestSelect1::test_select_z_x_y[v1] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_date_time_format[v1] [GOOD] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] >> test_continue_mode.py::TestContinueMode::test_deny_disposition_from_checkpoint_in_create_query[v1-mvp_external_ydb_endpoint0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-streaming] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case2[v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-result_sets] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] 
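The REPLACE DYNCONFIG failure recorded in the _bad_dynconfig audit entry above ("plain scalar cannot start with '%'") is the expected outcome of that negative test: YAML reserves '%' for directives, so a plain scalar such as version: %s is rejected by the parser, and quoting the value is what would make it legal. A small illustration with PyYAML, used here only to demonstrate the YAML rule (it is not the fyamlcpp parser the server uses):

    import yaml  # PyYAML, assumed available only for this illustration

    broken = "metadata:\n  version: %s\n"      # '%' cannot start a plain scalar
    fixed = 'metadata:\n  version: "%s"\n'     # quoting the value makes it valid YAML

    try:
        yaml.safe_load(broken)
    except yaml.YAMLError as err:
        print("rejected:", err)

    print(yaml.safe_load(fixed))               # {'metadata': {'version': '%s'}}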
|89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith19Cpu::test >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case4[v1] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types_without_predicate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=897606) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_select_1.py::TestSelect1::test_unwrap_null[v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_date_time_format[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=901563) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_select_1.py::TestSelect1::test_compile_error[v1] [GOOD] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-aborted] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith19Cpu::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_3_selects.py::TestSelects::test_3_selects[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=907050) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_use_unsupported_predicate [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime-pk_types33-all_types33-index33---] >> test_yq_streaming.py::TestYqStreaming::test_match_recognize_sink[v1] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_metadatafields [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case5[v1] >> TLocksTest::Range_GoodLock0 >> test_select_1.py::TestSelect1::test_select_z_x_y[v2] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-plan] |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-streaming] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=898461) is multi-threaded, use of fork() may lead to deadlocks in the child. 
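The ResourceWarning lines in these logs only say "Enable tracemalloc to get the object allocation traceback". When reproducing such a warning locally, running with PYTHONTRACEMALLOC=1 (or the sketch below) makes the warning include an "Object allocated at" traceback pointing at the leaked object; the snippet is a generic illustration, not taken from the YDB tests:

    import gc
    import socket
    import tracemalloc
    import warnings

    tracemalloc.start(25)                        # keep up to 25 frames per allocation
    warnings.simplefilter("always", ResourceWarning)

    def leak():
        socket.socket()                          # deliberately never closed

    leak()        # the socket is finalized here; with tracemalloc on, the ResourceWarning
                  # also prints "Object allocated at (most recent call last): ..."
    gc.collect()  # not needed under CPython's refcounting, kept for other implementations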
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:13:38] send response localhost:24867/?database=local ::1 - - [05/May/2025 03:13:38] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:13:53] send response localhost:24867/?database=local ::1 - - [05/May/2025 03:13:53] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success >> test_select_1.py::TestSelect1::test_select_1[v1] [GOOD] |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/00093c/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk5/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_good_dynconfig/audit.txt 2025-05-05T03:13:54.409503Z: {"sanitized_token":"othe****ltin (27F910A9)","subject":"other-user@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> TLocksTest::Range_GoodLock0 [GOOD] >> TLocksTest::Range_GoodLock1 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-plan] |89.4%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_yq_streaming.py::TestYqStreaming::test_state_load_mode[v1] >> test_bad_syntax.py::TestBadSyntax::test_type_as_column[v1] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case2[v1] [GOOD] >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops >> test_recovery.py::TestRecovery::test_recovery [GOOD] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle [GOOD] >> test_public_api.py::TestCRUDOperations::test_none_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_list_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_tuple >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_tuple [GOOD] >> test_public_api.py::TestCRUDOperations::test_dict_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types >> test_public_api.py::TestCRUDOperations::test_data_types [GOOD] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-result_sets] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert >> test_public_api.py::TestCRUDOperations::test_bulk_upsert [GOOD] >> test_public_api.py::TestCRUDOperations::test_all_enums_are_presented_as_exceptions [GOOD] >> test_public_api.py::TestCRUDOperations::test_type_builders_str_methods [GOOD] >> test_public_api.py::TestCRUDOperations::test_create_and_delete_session_then_use_it_again [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_2 [GOOD] >> 
test_public_api.py::TestCRUDOperations::test_tcl_3 >> test_auditlog.py::test_single_dml_query_logged[delete] >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types >> test_public_api.py::TestCRUDOperations::test_tcl_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak >> test.py::test_plans[row] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-aborted] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=914228) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-1.test] >> TLocksTest::Range_GoodLock1 [GOOD] >> test_continue_mode.py::TestContinueMode::test_deny_disposition_from_checkpoint_in_create_query[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_select_1.py::TestSelect1::test_select_z_x_y[v2] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-plan] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_GoodLock1 [GOOD] Test command err: 2025-05-05T03:14:02.259606Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500794633415960358:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:02.259645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d05/r3tmp/tmp4yfjk6/pdisk_1.dat 2025-05-05T03:14:02.312563Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18784 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:02.361798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:02.361841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:02.362927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:14:02.389364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:14:02.397212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:02.461498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:02.470460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:02.718399Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500794633371435647:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:02.718418Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d05/r3tmp/tmplbtcWa/pdisk_1.dat 2025-05-05T03:14:02.733661Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22144 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:02.822465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:14:02.822520Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:02.822545Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-05T03:14:02.823542Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:02.831276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:02.845645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:02.859592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:03.162309Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500794639010571913:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d05/r3tmp/tmp31yC4M/pdisk_1.dat 2025-05-05T03:14:03.170003Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:14:03.173715Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7145 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:03.264946Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:03.264975Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:03.265463Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:03.266003Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:03.272119Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:03.287209Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:03.301520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:03.621329Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500794639597250988:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:03.621673Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d05/r3tmp/tmpfyW4rX/pdisk_1.dat 2025-05-05T03:14:03.632533Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15355 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:03.725210Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:03.725264Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:03.725668Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:03.726259Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:03.734264Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waitin ... 1 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:05.011451Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:05.011486Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:05.011923Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:05.012475Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-05T03:14:05.022421Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:05.042249Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:05.054440Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:05.412561Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500794645718808631:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:05.415156Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d05/r3tmp/tmpXS8fwM/pdisk_1.dat 2025-05-05T03:14:05.434457Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25058 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:05.517432Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:05.517472Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:05.517821Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:14:05.518503Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:05.524594Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:05.533431Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:14:05.551320Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:05.561921Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:05.897250Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500794646293814377:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:05.897273Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d05/r3tmp/tmpYkzaOH/pdisk_1.dat 2025-05-05T03:14:05.911987Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6640 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:06.001579Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:06.001631Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:06.001999Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:06.003053Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:06.009237Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:14:06.024286Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T03:14:06.037937Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:14:06.300552Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500794650767611020:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:06.300634Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d05/r3tmp/tmplWHWnf/pdisk_1.dat 2025-05-05T03:14:06.311836Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:32638 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:06.403995Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:06.404042Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:06.404483Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:06.405057Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:06.415948Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:06.430303Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:06.443819Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_use_unsupported_predicate [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=895974) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts [GOOD] >> test_public_api.py::TestCRUDOperations::test_presented_in_cache [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values >> test_public_api.py::TestCRUDOperations::test_decimal_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children [GOOD] >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modifications_1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modification_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success [GOOD] >> test_public_api.py::TestCRUDOperations::test_modify_permissions_3 [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint32-pk_types23-all_types23-index23---] >> test_public_api.py::TestCRUDOperations::test_directory_that_doesnt_exists [GOOD] >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions [GOOD] >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 >> test_big_state.py::TestBigState::test_gt_8mb[v1] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case3[v1] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-streaming] [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 >> test_select_1.py::TestSelect1::test_compile_error[v2] |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith1Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith37Cpu::test >> test_public_api.py::TestCRUDOperations::test_queries_set2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_match_recognize[v1-kikimr0] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-plan] |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_2_connection [GOOD] |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_select_1.py::TestSelect1::test_select_1[v2] >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith1Cpu::test [GOOD] |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |89.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-result_sets] >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_bad_syntax.py::TestBadSyntax::test_require_as[v1] >> test_recovery.py::TestRecovery::test_program_state_recovery_error_if_no_states [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-1.test] >> TLocksTest::GoodDupLock |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_actorsystem.py::TestWithHybridNodeWith20Cpu::test >> test.py::test_plans[row] [GOOD] >> TFlatTest::SelectBigRangePerf >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-plan] >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] [GOOD] >> TFlatTest::SelectBigRangePerf [GOOD] >> TFlatTest::SelectRangeBothLimit >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config >> 
test_yds_bindings.py::TestBindings::test_yds_insert[v1] [SKIPPED] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case1[v1] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-result_sets] >> test_select_1.py::TestSelect1::test_compile_error[v2] [GOOD] >> TFlatTest::SelectRangeBothLimit [GOOD] >> test_select_1.py::TestSelect1::test_unwrap_null[v1] [GOOD] |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> TLocksTest::GoodDupLock [GOOD] >> TLocksTest::CK_Range_GoodLock ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_plans[row] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBothLimit [GOOD] Test command err: 2025-05-05T03:14:12.115259Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500794675371229971:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:12.115517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d00/r3tmp/tmpJCEiFy/pdisk_1.dat 2025-05-05T03:14:12.188445Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8802 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:12.217214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:12.217243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:12.218443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:14:12.251860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-05-05T03:14:12.267496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... insert finished 1813 usec 1417 usec 1467 usec 1660 usec 1434 usec 1532 usec 1498 usec 1368 usec 1589 usec 1397 usec 2025-05-05T03:14:12.762048Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500794678840827385:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:12.762089Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000d00/r3tmp/tmpPLyRDl/pdisk_1.dat 2025-05-05T03:14:12.771841Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22152 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:12.865434Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:12.865476Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:12.866329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:14:12.866476Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T03:14:12.877069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> test_continue_mode.py::TestContinueMode::test_deny_state_load_mode_from_checkpoint_in_modify_query[v1-mvp_external_ydb_endpoint0] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_UUID-pk_types31-all_types31-index31---] >> test_actorsystem.py::TestWithHybridNodeWith20Cpu::test [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_optional >> test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-plan] |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-result_sets] >> test_select_1.py::TestSelect1::test_select_1[v2] [GOOD] |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TLocksTest::CK_Range_GoodLock [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith2Cpu::test |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_select_limit.py::TestSelectLimit::test_select_limit[v1] >> test_actorsystem.py::TestWithStorageNodeWith37Cpu::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_GoodLock [GOOD] Test command err: 2025-05-05T03:14:11.531052Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500794673839326461:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:11.531097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003fb/r3tmp/tmpDgvDXg/pdisk_1.dat 2025-05-05T03:14:11.594243Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8936 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:11.627753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T03:14:11.637726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:11.665435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:11.665481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:11.666601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:14:11.700659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:11.710786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:11.993081Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500794671065157557:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:11.993362Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003fb/r3tmp/tmpnNux53/pdisk_1.dat 2025-05-05T03:14:12.003981Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18063 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:12.096987Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:12.097022Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:12.097409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:12.098043Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-05T03:14:12.106172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:12.120924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:12.134717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:12.474876Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500794678107770856:2064];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003fb/r3tmp/tmprznXSL/pdisk_1.dat 2025-05-05T03:14:12.480822Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:14:12.486530Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15660 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:12.578338Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:12.578369Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:12.578794Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:12.579414Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:12.589352Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:12.603737Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:14:12.618159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:12.952672Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500794677529192051:2206];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003fb/r3tmp/tmpiqMCY9/pdisk_1.dat 2025-05-05T03:14:12.959701Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:14:12.965137Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:32488 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:13.056023Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:13.056056Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:13.056380Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:14:13.057043Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:13.058623Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:13.065631Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation ... 57 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:14.489102Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:14.489137Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:14.489489Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:14.490216Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:14:14.490551Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:14.499783Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:14.514896Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:14.528729Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:14.861847Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500794686770493864:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:14.862275Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003fb/r3tmp/tmpnZlZjh/pdisk_1.dat 2025-05-05T03:14:14.873999Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25627 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:14.965787Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:14.965818Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:14.966254Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:14.966853Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:14.976171Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:14.995172Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:14:15.005034Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:14:15.343597Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500794688676199889:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:15.343618Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003fb/r3tmp/tmpKhM89g/pdisk_1.dat 2025-05-05T03:14:15.360656Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6938 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:15.447965Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:15.448000Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:15.448036Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:15.449184Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:15.451988Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:15.467412Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:15.481529Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:15.753153Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500794689834495160:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:15.753395Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003fb/r3tmp/tmpWvivVQ/pdisk_1.dat 2025-05-05T03:14:15.764384Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3891 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:15.857099Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:15.857135Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:15.857552Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:15.858131Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:15.865079Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:15.880270Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:15.893634Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage.script-script] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery.py::TestRecovery::test_program_state_recovery_error_if_no_states [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=895502) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_actorsystem.py::TestWithHybridNodeWith21Cpu::test >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types_without_predicate [GOOD] |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_ends_request_after_kill >> test_row_dispatcher.py::TestPqRowDispatcher::test_3_sessions |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage_key.script-script] >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates >> test_actorsystem.py::TestWithHybridNodeWith2Cpu::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_z_x_y[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=909392) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case3[v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage_key.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-plan] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-2.test] |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/0008a8/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk4/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_bad_dynconfig/audit.txt 2025-05-05T03:14:11.030011Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"ERROR","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success >> test_actorsystem.py::TestWithHybridNodeWith21Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-result_sets] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case1[v1] [GOOD] >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_read_raw_format_with_row_dispatcher ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/0008a7/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk16/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.delete/audit.txt 2025-05-05T03:14:12.069751Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:14:12.069736Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-05-05T03:14:12.045045Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-10.test] |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] Test command err: contrib/tools/python3/Lib/logging/__init__.py:1759: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000b4e/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/test_session_pool/testing_out_stuff/run.log' mode='a' encoding='utf-8'> ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types [GOOD] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-finished] >> test_bad_syntax.py::TestBadSyntax::test_require_as[v1] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int8-pk_types21-all_types21-index21---] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-plan] >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_compile_error[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=914826) is multi-threaded, use of fork() may lead to deadlocks in the child. 
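The requests_client.py:140 DeprecationWarning repeated above ("The 'warn' method is deprecated, use 'warning' instead") is triggered by calling logger.warn(...). A short sketch of the standard-library fix follows; the logger name and placeholder values are illustrative, not the file's real contents, and lazy %-formatting is used so the message is only built when the record is actually emitted.

import logging

logger = logging.getLogger("requests_client")             # illustrative name
code, reason, text = 400, "ThrottlingException", "retry"  # placeholder values
# Deprecated spelling that produces the warning seen in the log:
#   logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(code, reason, text))
# Preferred spelling:
logger.warning("Last request failed with code %s, reason '%s' and text '%s'", code, reason, text)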
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-result_sets] |89.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_actorsystem.py::TestWithHybridNodeWith35Cpu::test |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_continue_mode.py::TestContinueMode::test_deny_state_load_mode_from_checkpoint_in_modify_query[v1-mvp_external_ydb_endpoint0] [GOOD] |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case3[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=919288) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_auditlog.py::test_dml_requests_logged_when_unauthorized >> test_actorsystem.py::TestWithHybridNodeWith30Cpu::test >> TLocksFatTest::RangeSetBreak >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-std] >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(a, b) from t1-Aggregation function Min requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-result_sets] >> test_yq_streaming.py::TestYqStreaming::test_state_load_mode[v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_1[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=915757) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000eaf/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_select_1.py.TestSelect1.test_select_1.v1/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000eaf/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_select_1.py.TestSelect1.test_select_1.v2/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case1[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=933001) is multi-threaded, use of fork() may lead to deadlocks in the child. 
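The ResourceWarning pointing at ydb/tests/tools/fq_runner/kikimr_runner.py:184 shows the offending pattern itself: meterings_loaded = sum(1 for _ in open(bill_fname)) leaves the file object for the garbage collector to close. A minimal sketch of the conventional fix is below, assuming only that bill_fname is a path to a text file with one metering record per line.

def count_meterings(bill_fname: str) -> int:
    # The with-block closes the handle deterministically, so no ResourceWarning
    # is emitted when the object is later garbage-collected.
    with open(bill_fname, "r", encoding="utf-8") as bill:
        return sum(1 for _ in bill)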
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_sensors >> test_cpu_quota.py::TestCpuQuota::test_cpu_quota[v1-mvp_external_ydb_endpoint0] |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_bad_syntax.py::TestBadSyntax::test_require_as[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=906745) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_actorsystem.py::TestWithHybridNodeWith22Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-plan] >> TLocksFatTest::RangeSetBreak [GOOD] >> TLocksFatTest::RangeSetNotBreak ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=909957) is multi-threaded, use of fork() may lead to deadlocks in the child. 
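Several ResourceWarning lines above end with the hint "Enable tracemalloc to get the object allocation traceback". A hedged sketch of acting on that hint follows: the environment variable and API calls are standard CPython, but wiring them into these particular test wrappers is an assumption, not something shown in the log.

# Option 1: enable allocation tracing for the whole run from the outside,
# e.g. PYTHONTRACEMALLOC=1 <test command>   (the command line is illustrative).
# Option 2: enable it inside the test process and fail fast on leaks.
import tracemalloc
import warnings

tracemalloc.start(10)                             # keep up to 10 frames per allocation
warnings.simplefilter("error", ResourceWarning)   # turn unclosed-resource warnings into errors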
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_actorsystem.py::TestWithStorageNodeWith38Cpu::test >> test_select_limit.py::TestSelectLimit::test_select_limit[v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-result_sets] |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join3.test] |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> TLocksFatTest::RangeSetNotBreak [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-plan] |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksFatTest::RangeSetNotBreak [GOOD] Test command err: 2025-05-05T03:14:25.792982Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500794732394574004:2059];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:25.793021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000cfa/r3tmp/tmp2jipNP/pdisk_1.dat 2025-05-05T03:14:25.851525Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11090 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:14:25.926820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:25.926848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:25.927632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:14:25.927847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:25.930570Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:25.938127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:26.002771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:26.012828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:28.068939Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500794746391674595:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:28.068960Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000cfa/r3tmp/tmpxs1dwK/pdisk_1.dat 2025-05-05T03:14:28.089550Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6261 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:14:28.178543Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:28.178576Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:28.179000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:14:28.179636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:28.184861Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:28.193045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:14:28.207845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:28.222452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-finished] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith22Cpu::test [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp64-pk_types38-all_types38-index38---] >> test_actorsystem.py::TestWithHybridNodeWith30Cpu::test [GOOD] >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-result_sets] >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith21Cpu::test |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_actorsystem.py::TestWithHybridNodeWith35Cpu::test [GOOD] >> test_public_api.py::TestSessionNotFound::test_session_not_found |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-result_sets] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-3.test] |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-plan] >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_optional [GOOD] |89.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-10.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-11.test] >> 
test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-aborted] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_limit.py::TestSelectLimit::test_select_limit[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=922570) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case5[v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-plan] [GOOD] >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join3.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join4.test] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-result_sets] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_state_load_mode[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=917177) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_actorsystem.py::TestWithComputeNodeWith21Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/write_group_by.script-script] |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] >> test_row_dispatcher.py::TestPqRowDispatcher::test_start_new_query >> test_sql.py::TestCanonicalFolder1::test_case[write/write_group_by.script-script] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith38Cpu::test [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_actorsystem.py::TestWithHybridNodeWith23Cpu::test >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/suite_tests/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case5[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=916204) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] >> test_cpu_quota.py::TestCpuQuota::test_cpu_quota[v1-mvp_external_ydb_endpoint0] [GOOD] |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_match_recognize[v1-kikimr0] [GOOD] |90.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldSuccess >> test_actorsystem.py::TestWithStorageNodeWith32Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/00087b/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk14/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_unauthorized/audit.txt 2025-05-05T03:14:31.458692Z: {"database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:14:31.458679Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-05-05T03:14:31.455566Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T03:14:31.570320Z: {"database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:14:31.570307Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-05-05T03:14:31.565748Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T03:14:31.680160Z: {"database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:14:31.680148Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-05-05T03:14:31.676873Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T03:14:31.791684Z: 
{"database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:14:31.791673Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-05-05T03:14:31.787711Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T03:14:31.906851Z: {"database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:14:31.906825Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-05-05T03:14:31.901250Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T03:14:32.018973Z: {"database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:14:32.018961Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-05-05T03:14:32.015671Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join4.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select.test] |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TFlatTest::SelectRangeItemsLimit >> test_big_state.py::TestBigState::test_gt_8mb[v1] [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldPageToken >> TFlatTest::SelectRangeForbidNullArgs2 >> test_actorsystem.py::TestWithHybridNodeWith23Cpu::test [GOOD] >> TFlatTest::SelectRangeItemsLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs4 >> TFlatTest::SelectRangeForbidNullArgs2 [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] >> TLocksTest::Range_BrokenLockMax >> test_continue_mode.py::TestContinueMode::test_continue_from_offsets[v1-mvp_external_ydb_endpoint0] >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] >> TLocksTest::BrokenLockUpdate >> test_actorsystem.py::TestWithComputeNodeWith22Cpu::test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-11.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-12.test] |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] Test command err: 2025-05-05T03:14:41.643222Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500794801986050433:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:41.643293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000280/r3tmp/tmpnPW88P/pdisk_1.dat 2025-05-05T03:14:41.703170Z node 1 :IMPORT WARN: Table profiles were not 
loaded TClient is connected to server localhost:14796 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:41.777175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:41.777224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:41.778188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:14:41.778771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:14:41.786596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:42.088363Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500794804757695889:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:42.088383Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000280/r3tmp/tmpXx5BIt/pdisk_1.dat 2025-05-05T03:14:42.099022Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12016 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
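Editor's note: each record in the audit.txt excerpt a few lines up is an ISO timestamp, a colon and one JSON object. A small sketch of splitting such a file back into structured records; only the "<timestamp>: {json}" layout is taken from the excerpt, the field selection in the sample is illustrative.

# Sketch: parse audit.txt lines of the form "<ISO timestamp>: {json}" as shown above.
# Assumption: one record per line; field names are whatever the JSON happens to contain.
import json

def parse_audit_line(line: str) -> tuple[str, dict]:
    ts, payload = line.split(": ", 1)   # split only on the first ": "
    return ts, json.loads(payload)

if __name__ == "__main__":
    sample = ('2025-05-05T03:14:31.458692Z: {"database":"/Root/test_auditlog.py",'
              '"status":"ERROR","operation":"ExecuteDataQueryRequest"}')
    ts, record = parse_audit_line(sample)
    print(ts, record["operation"], record["status"])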
2025-05-05T03:14:42.191854Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:42.191884Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:42.192297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:42.192889Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:42.199150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> TYdbControlPlaneStorageModifyBinding::ShouldValidate >> test_row_dispatcher.py::TestPqRowDispatcher::test_read_raw_format_with_row_dispatcher [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] Test command err: 2025-05-05T03:14:42.001131Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500794804868819627:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:42.001168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002fa/r3tmp/tmpGKT0sP/pdisk_1.dat 2025-05-05T03:14:42.061226Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22988 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:42.094156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:14:42.100869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:14:42.136833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:42.136893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:42.137968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:14:42.400904Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500794803851117537:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:42.400924Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002fa/r3tmp/tmpHme4tR/pdisk_1.dat 2025-05-05T03:14:42.412615Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1155 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:42.503885Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:42.503920Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:42.504315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:42.504988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:42.514543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> test_actorsystem.py::TestWithHybridNodeWith31Cpu::test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_ends_request_after_kill [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith36Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_match_recognize[v1-kikimr0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=900347) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> TYdbControlPlaneStorageListBindings::ShouldSuccess >> TLocksTest::Range_BrokenLockMax [GOOD] >> TLocksTest::Range_CorrectDot >> test_self_heal.py::TestEnableSelfHeal::test_replication >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok >> TYdbControlPlaneStorageListConnections::ShouldPageToken [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldEmptyPageToken >> TLocksTest::BrokenLockUpdate [GOOD] >> TLocksTest::BrokenNullLock ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_cpu_quota.py::TestCpuQuota::test_cpu_quota[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=918527) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:14:23] send response localhost:7896/?database=local ::1 - - [05/May/2025 03:14:23] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:14:37] send response localhost:7896/?database=local ::1 - - [05/May/2025 03:14:37] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> TYdbControlPlaneStorageModifyBinding::ShouldValidate [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouleCheckObjectStorageProjectionByColumns >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime64-pk_types37-all_types37-index37---] >> test_row_dispatcher.py::TestPqRowDispatcher::test_3_sessions [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouleCheckObjectStorageProjectionByColumns [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionEmpty >> test_metrics_cleanup.py::TestCleanup::test_cleanup[v1] [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldFilterByName >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-3.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-4.test] >> test_actorsystem.py::TestWithComputeNodeWith22Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith32Cpu::test [GOOD] >> TLocksTest::Range_CorrectDot [GOOD] >> test_quota_exhaustion.py::TestYdbWorkload::test_delete |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_sensors [GOOD] >> TLocksTest::BrokenNullLock [GOOD] |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] >> 
test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_CorrectDot [GOOD] Test command err: 2025-05-05T03:14:42.555744Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500794807183699410:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:42.555784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003f1/r3tmp/tmp9aeKw0/pdisk_1.dat 2025-05-05T03:14:42.615580Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:65312 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:42.652753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:14:42.660981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:42.690630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:42.690659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:42.691740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:42.722130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-05T03:14:42.731351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
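Editor's note: the access-log style lines in the cpu_quota transcript above ('::1 - - [...] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 -') are the standard http.server request log of whatever stub served the mvp_external_ydb_endpoint0 fixture. A toy stand-in that produces the same kind of log lines; the path, port and response payload are assumptions of this sketch, not the harness's actual mock.

# Toy stub endpoint whose request log resembles the
# '::1 - - [...] "GET /database?databaseId=... HTTP/1.1" 200 -' lines above.
import json
from http.server import BaseHTTPRequestHandler, HTTPServer

class StubHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        if self.path.startswith("/database"):
            body = json.dumps({"endpoint": "localhost:7896/?database=local"}).encode()
            self.send_response(200)        # logged by BaseHTTPRequestHandler as ... 200 -
            self.send_header("Content-Type", "application/json")
            self.end_headers()
            self.wfile.write(body)
        else:
            self.send_error(404)

if __name__ == "__main__":
    HTTPServer(("localhost", 7896), StubHandler).serve_forever()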
2025-05-05T03:14:43.012636Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500794808622288362:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:43.012963Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003f1/r3tmp/tmp3HoxOi/pdisk_1.dat 2025-05-05T03:14:43.025440Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29080 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:43.116602Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:43.116637Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:43.117012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:43.117654Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:43.123892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:43.138699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:43.152252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:14:43.447474Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500794811499377757:2139];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:43.447577Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003f1/r3tmp/tmp3rtSik/pdisk_1.dat 2025-05-05T03:14:43.470590Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14173 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:43.551319Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:43.551351Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:43.551889Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:14:43.552428Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:43.554761Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:43.570736Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:14:43.575605Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:43.588117Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:43.599939Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:14:43.925750Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500794808607069489:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:43.925782Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003f1/r3tmp/tmpJDFfxH/pdisk_1.dat 2025-05-05T03:14:43.938819Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25852 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:44.026507Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:44.026550Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:44.027000Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:44.027565Z node 4 :HIVE WARN: HIV ... hot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003f1/r3tmp/tmpLGN2kp/pdisk_1.dat 2025-05-05T03:14:45.405532Z node 7 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2812 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
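Editor's note: the HIVE warnings in these transcripts walk every node through the same VolatileState sequence, Unknown -> Disconnected -> Connecting -> Connected. Purely as an illustration of reading that out of a log: the state names come from the log text itself, while the regex and the allowed-transition table below are assumptions of this sketch, not YDB internals.

# Sketch: check that VolatileState transitions in a log follow
# Unknown -> Disconnected -> Connecting -> Connected, per node.
# Assumption: lines look like the HIVE WARN messages quoted above.
import re
from collections import defaultdict

TRANSITION = re.compile(r"Node\((\d+), .*?\) VolatileState: (\w+) -> (\w+)")
ALLOWED = {("Unknown", "Disconnected"),
           ("Disconnected", "Connecting"),
           ("Connecting", "Connected")}

def check(log_lines):
    state = defaultdict(lambda: "Unknown")
    for line in log_lines:
        m = TRANSITION.search(line)
        if not m:
            continue
        node, src, dst = m.groups()
        if (src, dst) not in ALLOWED or state[node] != src:
            print(f"unexpected transition on node {node}: {src} -> {dst}")
        state[node] = dst
    return state

if __name__ == "__main__":
    demo = [
        "node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected",
        "node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting",
        "node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected",
    ]
    print(dict(check(demo)))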
2025-05-05T03:14:45.495322Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:45.495353Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:45.495725Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:45.496497Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:45.503822Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:45.517935Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:45.532586Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:45.918631Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500794819146148967:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:45.918650Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003f1/r3tmp/tmpbjpKXi/pdisk_1.dat 2025-05-05T03:14:45.941722Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61122 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:14:46.018627Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:46.018660Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:46.019859Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:14:46.023018Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:14:46.027400Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:46.043212Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:46.056729Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003f1/r3tmp/tmpdKXbUX/pdisk_1.dat 2025-05-05T03:14:46.425497Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:14:46.446835Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1487 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:46.522452Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:46.522486Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:46.522832Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:46.523521Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-05T03:14:46.532351Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:46.547121Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:46.562299Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:46.913706Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500794821581744575:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:46.914139Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003f1/r3tmp/tmp9r4mkg/pdisk_1.dat 2025-05-05T03:14:46.927905Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5064 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:47.017579Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:47.017629Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:47.017978Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:47.018614Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:47.029662Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T03:14:47.044509Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T03:14:47.058109Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::BrokenNullLock [GOOD] Test command err: 2025-05-05T03:14:42.862651Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500794804475077157:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:42.862860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000cf5/r3tmp/tmpm523wg/pdisk_1.dat 2025-05-05T03:14:42.921673Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:30782 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:42.956120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:14:42.962193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:42.964474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:42.964497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:42.965657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:43.027155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-05T03:14:43.036410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:14:43.324001Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500794811803713317:2205];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:43.326100Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000cf5/r3tmp/tmpE9H5Fb/pdisk_1.dat 2025-05-05T03:14:43.340182Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13541 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:43.430790Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:43.430835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:43.431270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:14:43.432415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:43.434754Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:43.445487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:43.460308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:43.474766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:14:43.809962Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500794808442003328:2066];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000cf5/r3tmp/tmp8EFEdx/pdisk_1.dat 2025-05-05T03:14:43.814703Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:14:43.820294Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:31501 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:43.913406Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:43.913460Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:43.913912Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:43.914387Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:43.921370Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:43.935974Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T03:14:43.950159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:14:44.228844Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500794814975928967:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:44.228878Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000cf5/r3tmp/tmp2fFWAp/pdisk_1.dat 2025-05-05T03:14:44.241185Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8304 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:44.333219Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:44.333267Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:44.333744Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:44.334284Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:44.341451Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation ... : 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:14:45.855337Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:45.855376Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:45.855721Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:14:45.856444Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:45.858535Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:45.871856Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:45.883674Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:45.896770Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:46.234027Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500794824787056102:2143];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000cf5/r3tmp/tmpfDvg7t/pdisk_1.dat 2025-05-05T03:14:46.239287Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:14:46.250287Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15342 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:14:46.338722Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:46.338771Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:46.339204Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:46.339762Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:46.350486Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:14:46.352545Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:46.368714Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:46.382435Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:46.724637Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500794821608160761:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:14:46.724657Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000cf5/r3tmp/tmposDKhD/pdisk_1.dat 2025-05-05T03:14:46.736342Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28269 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:14:46.828690Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:46.828730Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:46.829210Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:14:46.829796Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:14:46.830882Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:46.840149Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:46.855606Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:46.869456Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000cf5/r3tmp/tmp6CNk4Q/pdisk_1.dat 2025-05-05T03:14:47.162780Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:14:47.163558Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28869 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:14:47.255230Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:14:47.255262Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:14:47.255566Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:47.256141Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-05T03:14:47.260056Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:47.274763Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:14:47.288694Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-std] [GOOD] |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldFilterByName [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldFilterByMe |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPublic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_read_raw_format_with_row_dispatcher [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=921492) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith23Cpu::test [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-15.test] >> test_actorsystem.py::TestWithStorageNodeWith39Cpu::test >> test_actorsystem.py::TestWithHybridNodeWith31Cpu::test [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldEmptyPageToken [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldValidate >> TYdbControlPlaneStorageListBindings::ShouldFilterByMe [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldPageToken >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[kick_tablets] |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[write/write_group_by.script-script] [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldValidate [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionEmpty |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query [GOOD] |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_actorsystem.py::TestWithComputeNodeWith10Cpu::test |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageCreateQuery::ShouldSucccess >> test_metrics_cleanup.py::TestCleanup::test_keep[v1] |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivate >> test_actorsystem.py::TestWithHybridNodeWith36Cpu::test [GOOD] |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> TYdbControlPlaneStorageCreateQuery::ShouldSucccess [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldValidate >> test_actorsystem.py::TestWithComputeNodeWith23Cpu::test |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> TYdbControlPlaneStorageCreateQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicSuccess >> test_actorsystem.py::TestWithStorageNodeWith33Cpu::test >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicSuccess [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicFailed |90.2%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_actorsystem.py::TestWithComputeNodeWith10Cpu::test [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicFailed [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivate >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeFailed >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeFailed [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldSuccess >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_not_null >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-12.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-13.test] >> TYdbControlPlaneStorageListBindings::ShouldPageToken [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldValidate |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_3_sessions [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=917573) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivateAfterModify >> TYdbControlPlaneStorageDeleteBinding::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPermission >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-15.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-2.test] >> TYdbControlPlaneStorageListBindings::ShouldValidate [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionEmpty >> test_actorsystem.py::TestWithComputeNodeWith23Cpu::test [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionEmpty >> test_public_api.py::TestSessionNotFound::test_session_not_found [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckExist >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivateAfterModify [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivatePublic >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_actorsystem.py::TestWithStorageNodeWith39Cpu::test [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-4.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-5.test] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldValidate |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPublic |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> 
test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-std] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith33Cpu::test [GOOD] |90.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith11Cpu::test >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivatePublic >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool >> TYdbControlPlaneStorageDeleteBinding::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckSuperUser >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select.test] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith32Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_ends_request_after_kill [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-5.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-1.test] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckLimit >> TFlatTest::CrossRW >> test_actorsystem.py::TestWithStorageNodeWith5Cpu::test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-2.test] >> TFlatTest::CrossRW [GOOD] >> TFlatTest::GetTabletCounters >> test_row_dispatcher.py::TestPqRowDispatcher::test_start_new_query [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckIdempotencyKey >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPermission |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TFlatTest::GetTabletCounters [GOOD] >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic >> test_actorsystem.py::TestWithComputeNodeWith11Cpu::test [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckExist >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx [GOOD] >> 
test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_invalid_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::GetTabletCounters [GOOD] Test command err: 2025-05-05T03:15:02.750430Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500794892507613959:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:15:02.750454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000cee/r3tmp/tmptIdPTx/pdisk_1.dat 2025-05-05T03:15:02.811387Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25634 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:15:02.885759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:15:02.885792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:15:02.886763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:15:02.886892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T03:15:02.899125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:15:03.222785Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500794894233710874:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:15:03.223038Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000cee/r3tmp/tmp70iVkD/pdisk_1.dat 2025-05-05T03:15:03.237341Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25387 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:15:03.326621Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:15:03.326653Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:15:03.327162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:15:03.327690Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:15:03.340099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746414903401 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckLimit [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionFailed >> TYdbControlPlaneStorageListQueries::ShouldCheckScopeVisibility >> test_actorsystem.py::TestWithComputeNodeWith24Cpu::test >> test_public_api.py::TestSessionNotFoundOperations::test_simple_table_profile_settings [GOOD] |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> TYdbControlPlaneStorageModifyConnection::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckNotExistOldName >> test_actorsystem.py::TestWithStorageNodeWith5Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith37Cpu::test |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select.test] [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionSuccess >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckNotExistOldName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckLowerCaseName |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-aborted] [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckScopeVisibility [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckPrivateVisibility >> TYdbControlPlaneStorageModifyConnection::ShouldCheckLowerCaseName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMaxLengthName |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 >> test.py::test_run_benchmark[generic-column] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-13.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-14.test] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionEmpty >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> 
test_actorsystem.py::TestWithStorageNodeWith34Cpu::test >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMaxLengthName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMultipleDotsName >> test_actorsystem.py::TestWithHybridNodeWith32Cpu::test [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckPrivateVisibility [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckSuperUser |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckLimit >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckAllowedSymbolsName >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[kick_tablets] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith24Cpu::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_start_new_query [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=934029) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |90.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_not_null [GOOD] >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckAllowedSymbolsName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMoveToScope >> TYdbControlPlaneStorageListQueries::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCombineFilters |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-aborted] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=939890) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivate >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-fifo] >> test_actorsystem.py::TestWithComputeNodeWith12Cpu::test >> TYdbControlPlaneStorageListQueries::ShouldCombineFilters [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-3.test] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMoveToScope [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckIdempotencyKey >> test_actorsystem.py::TestWithStorageNodeWith3Cpu::test >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith25Cpu::test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_actorsystem.py::TestWithStorageNodeWith6Cpu::test >> TYdbControlPlaneStorageListConnections::ShouldCheckLimit [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckScopeVisibility |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TYdbControlPlaneStorageModifyConnection::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionFailed |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageListQueries::ShouldCombineFilters [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 10140 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:09 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:06 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? 
S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? 
I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< May04 0:00 [kworker/25:0H-events_highpri] root 170 0 ... 
9Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:15:10.056530Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:15:10.056618Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2025-05-05T03:15:10.056619Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:10.056621Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:10.056942Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2025-05-05T03:15:10.056944Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:10.056946Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:10.057090Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T03:15:10.057092Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:10.057094Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:10.057180Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections". 
Create session OK 2025-05-05T03:15:10.057182Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:10.057183Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:10.057297Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2025-05-05T03:15:10.057299Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:10.057300Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:10.057467Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T03:15:10.057469Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:10.057470Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:10.071288Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:15:10.071307Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:15:10.090320Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:15:10.090339Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:15:10.095026Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:10.095042Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T03:15:10.116369Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:15:10.116369Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:15:10.116378Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:15:10.116387Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T03:15:10.116593Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:10.116603Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:15:10.116607Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:10.116610Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:15:10.116662Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:10.116671Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:15:10.116674Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:10.116676Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:15:10.116719Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:15:10.116721Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T03:15:10.116726Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 
2025-05-05T03:15:10.116728Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:15:10.116775Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:15:10.116777Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:15:10.116823Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:15:10.116824Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:10.116825Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:15:10.116826Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:15:10.116867Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:10.116868Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes": >> test_actorsystem.py::TestWithComputeNodeWith12Cpu::test [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-1.test] [GOOD] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivatePublic >> test_actorsystem.py::TestWithStorageNodeWith3Cpu::test [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionFailed [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith37Cpu::test [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown >> test_actorsystem.py::TestWithStorageNodeWith6Cpu::test [GOOD] |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date32-pk_types36-all_types36-index36---] >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_is_operational_with_distconf >> test_actorsystem.py::TestWithStorageNodeWith34Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith33Cpu::test |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] ------- 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionFailed [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 10140 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:09 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:06 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? 
S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? 
S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< May04 0:00 [kworker/25:0H-events_highpri] root 170 0 ... ouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:15:13.050192Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:15:13.050351Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T03:15:13.050360Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/result_sets". Create session OK 2025-05-05T03:15:13.050363Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:15:13.050363Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:13.050364Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:15:13.050365Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:13.050428Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/quotas". 
Create session OK 2025-05-05T03:15:13.050430Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:15:13.050431Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:15:13.050444Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/idempotency_keys". Create session OK 2025-05-05T03:15:13.050445Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:13.050446Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:13.066723Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:15:13.066739Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:15:13.090582Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:15:13.090612Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:15:13.097782Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:15:13.097800Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:15:13.097818Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:15:13.097827Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:15:13.098042Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:13.098074Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:15:13.098138Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:15:13.098140Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T03:15:13.098186Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:13.098189Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:15:13.098251Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:15:13.098253Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:15:13.098303Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:13.098304Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:15:13.098353Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:13.098355Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:15:13.098661Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:15:13.098667Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T03:15:13.105007Z 
node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:13.105034Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:15:13.105202Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:13.105215Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:15:13.105327Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:13.105336Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:15:13.105439Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:13.105441Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T03:15:13.630392Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:185: Revision of the connection has been changed already. Please restart the request with a new revision 2025-05-05T03:15:13.630688Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user@staff, utcuebkkbngl04m80ami] ModifyConnectionRequest: {connection_id: "utcuebkkbngl04m80ami" content { name: "test_connection_name_2" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } previous_revision: 10 } ERROR: {
: Error: Revision of the connection has been changed already. Please restart the request with a new revision, code: 1003 } |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_actorsystem.py::TestWithComputeNodeWith13Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_not_null [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=942992) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_actorsystem.py::TestWithComputeNodeWith25Cpu::test [GOOD] |90.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckScopeVisibility [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckPrivateVisibility >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-14.test] [GOOD] |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] >> test_disposition.py::TestContinueMode::test_disposition_time_ago[v1-mvp_external_ydb_endpoint0] [GOOD] |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-1.test] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith7Cpu::test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] >> TYdbControlPlaneStorageCreateConnection::ShouldSucccess >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] >> test_actorsystem.py::TestWithComputeNodeWith13Cpu::test [GOOD] |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 10140 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? 
I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:09 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:06 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? 
I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? 
S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< May04 0:00 [kworker/25:0H-events_highpri] root 170 0 ... tContext&)/pending_small". Create session OK 2025-05-05T03:15:13.756436Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:13.756437Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:13.756513Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks". Create session OK 2025-05-05T03:15:13.756521Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:15:13.756523Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:15:13.756533Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T03:15:13.756535Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:13.756538Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:13.756567Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2025-05-05T03:15:13.756575Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:13.756576Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:13.756640Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys". 
Create session OK 2025-05-05T03:15:13.756648Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:13.756649Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:13.768024Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:15:13.768040Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:15:13.790011Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:15:13.790034Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:15:13.794401Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:15:13.794435Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:15:13.808517Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:13.808537Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:15:13.808861Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:13.808879Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:15:13.810305Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:13.810320Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T03:15:13.810352Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:13.810357Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:15:13.810519Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:13.810528Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:15:13.810571Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:13.810578Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:15:13.810586Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:15:13.810587Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T03:15:13.810618Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:15:13.810618Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T03:15:13.810650Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:13.810657Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:15:13.810664Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:15:13.810666Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:15:13.810742Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:13.810753Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:15:13.810786Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:15:13.810788Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs": >> TYdbControlPlaneStorageCreateConnection::ShouldSucccess [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldDisableCurrentIam |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> TYdbControlPlaneStorageCreateConnection::ShouldDisableCurrentIam [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldValidate >> TYdbControlPlaneStorageCreateConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckUniqueName >> test_actorsystem.py::TestWithStorageNodeWith7Cpu::test [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckPrivateVisibility [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckSuperUser |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-fifo] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith4Cpu::test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckUniqueName [GOOD] >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicSuccess >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] >> test_continue_mode.py::TestContinueMode::test_continue_from_offsets[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test 
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-14.test] [GOOD] |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[kick_tablets] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith14Cpu::test >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicSuccess [GOOD] >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicFailed >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicFailed [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckIdempotencyKey >> test_actorsystem.py::TestWithHybridNodeWith33Cpu::test [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSkipBindingIfDisabledConnection >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-3.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-4.test] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCreateJob >> TYdbControlPlaneStorageListConnections::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByName >> test_actorsystem.py::TestWithStorageNodeWith4Cpu::test [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] >> test_actorsystem.py::TestWithComputeNodeWith26Cpu::test >> TYdbControlPlaneStorageCreateQuery::ShouldCreateJob [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckListJobs >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v0] >> test_actorsystem.py::TestWithComputeNodeWith14Cpu::test [GOOD] |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPublic |90.6%| [TA] $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |90.6%| [TA] {RESULT} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TYdbControlPlaneStorageCreateQuery::ShouldCheckListJobs [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsByQuery >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByName [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByMe >> test_eval.py::TestEval::test_eval_2_2[v1] |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsByQuery [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsCreatedByMe >> TYdbControlPlaneStorageModifyBinding::ShouldSuccess >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsCreatedByMe [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJob >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByMe [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCombineFilters >> test_actorsystem.py::TestWithHybridNodeWith38Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_continue_mode.py::TestContinueMode::test_continue_from_offsets[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=916594) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:14:53] send response localhost:28050/?database=local ::1 - - [05/May/2025 03:14:53] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJob [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeIncorrectJob |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_actorsystem.py::TestWithStorageNodeWith8Cpu::test >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_is_operational_with_distconf [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldSuccess >> TYdbControlPlaneStorageListConnections::ShouldCombineFilters [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByConnectionType >> test_actorsystem.py::TestWithStorageNodeWith35Cpu::test >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeIncorrectJob [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJobIncorrectVisibility >> TYdbControlPlaneStorageModifyBinding::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckLowerCaseName >> TYdbControlPlaneStoragePipeline::ShouldSkipBindingIfDisabledConnection [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSaveTopicConsumers >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v0] [GOOD] >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v1] >> test_actorsystem.py::TestWithComputeNodeWith26Cpu::test [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldSuccess [GOOD] >> 
TYdbControlPlaneStorageDescribeConnection::ShouldCheckPermission >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v1] [GOOD] >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v0] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJobIncorrectVisibility [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldSaveQuery >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v0] [GOOD] >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v1] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] >> TYdbControlPlaneStorageCreateQuery::ShouldSaveQuery [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckQueryName >> TYdbControlPlaneStorageModifyBinding::ShouldCheckLowerCaseName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMaxLengthName >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByConnectionType [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckExist >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v1] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith34Cpu::test >> TYdbControlPlaneStoragePipeline::ShouldSaveTopicConsumers [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSaveDqGraphs >> TYdbControlPlaneStorageCreateQuery::ShouldCheckQueryName [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckAvailableConnections >> test_actorsystem.py::TestWithStorageNodeWith8Cpu::test [GOOD] |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldValidate |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMaxLengthName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMultipleDotsName >> TYdbControlPlaneStorageListQueries::ShouldSuccess >> TYdbControlPlaneStorageDescribeConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckSuperUser >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSaveDqGraphs [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSaveResultSetMetas >> TYdbControlPlaneStorageListQueries::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldPageToken ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByConnectionType [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 10140 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:09 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? 
S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:06 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? 
S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< May04 0:00 [kworker/25:0H-events_highpri] root 170 0 ... 
TestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:27.415900Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:27.415934Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2025-05-05T03:15:27.415936Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:27.415937Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:27.415974Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2025-05-05T03:15:27.415975Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:27.415976Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:27.416046Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T03:15:27.416048Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:27.416049Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:27.416369Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/quotas". 
Create session OK 2025-05-05T03:15:27.416384Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:15:27.416385Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:15:27.418529Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2025-05-05T03:15:27.418537Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:27.418539Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:27.425049Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:15:27.425063Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:15:27.470121Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:15:27.470134Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:15:27.477073Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:15:27.477081Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:15:27.477085Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:15:27.477086Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T03:15:27.495358Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:27.495375Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T03:15:27.495619Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:15:27.495622Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:15:27.495719Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:27.495721Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:15:27.495816Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:27.495819Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:15:27.495990Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:27.495993Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:15:27.496132Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:27.496134Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:15:27.496192Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:27.496193Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/connections": 
2025-05-05T03:15:27.496238Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:15:27.496239Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:15:27.496283Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:27.496284Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:15:27.496344Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:27.496346Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:15:27.496770Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:15:27.496778Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/quotas": |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith4Cpu::test [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckAvailableConnections [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith15Cpu::test >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPermission >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldNotShowClickHousePassword >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-4.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-5.test] >> TYdbControlPlaneStoragePipeline::ShouldSaveResultSetMetas [GOOD] >> TYdbControlPlaneStorageQuotas::GetDefaultQuotas >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] >> TYdbControlPlaneStorageDescribeConnection::ShouldNotShowClickHousePassword [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionEmpty >> test_public_api.py::TestBadSession::test_simple >> TYdbControlPlaneStorageQuotas::GetDefaultQuotas [GOOD] >> TYdbControlPlaneStorageQuotas::OverrideQuotas >> test_actorsystem.py::TestWithStorageNodeWith9Cpu::test 
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckNotExistOldName >> TYdbControlPlaneStorageListQueries::ShouldPageToken [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldEmptyPageToken >> TYdbControlPlaneStorageQuotas::OverrideQuotas [GOOD] >> TYdbControlPlaneStorageQuotas::GetStaleUsage >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageCreateQuery::ShouldCheckAvailableConnections [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 10140 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:09 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:06 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? 
S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? 
S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< May04 0:00 [kworker/25:0H-events_highpri] root 170 0 ... local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs". Create session OK 2025-05-05T03:15:29.487590Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:15:29.487591Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:15:29.487668Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys". Create session OK 2025-05-05T03:15:29.487670Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:29.487672Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:29.487742Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases". 
Create session OK 2025-05-05T03:15:29.487743Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:29.487744Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:29.487820Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks". Create session OK 2025-05-05T03:15:29.487822Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:15:29.487823Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:15:29.487880Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T03:15:29.487882Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:29.487883Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:29.487989Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings". 
Create session OK 2025-05-05T03:15:29.487991Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:29.487992Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:29.518254Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:15:29.518273Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:15:29.551588Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:15:29.551610Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:15:29.577789Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:29.577807Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:15:29.578234Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:29.578245Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:15:29.578331Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:15:29.578338Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T03:15:29.578408Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:29.578414Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases": 
2025-05-05T03:15:29.578467Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:15:29.578475Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:15:29.578520Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:29.578527Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:15:29.578541Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:29.578558Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:15:29.578626Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:29.578634Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:15:29.578683Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:15:29.578691Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:15:29.578731Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:29.578737Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:15:29.578745Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:15:29.578753Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants": 
2025-05-05T03:15:29.578805Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:29.578815Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:15:29.578852Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:15:29.578860Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets": >> TYdbControlPlaneStorageQuotas::GetStaleUsage [GOOD] >> TYdbControlPlaneStorageQuotas::PushUsageUpdate >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-std] |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-fifo] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith15Cpu::test [GOOD] >> TYdbControlPlaneStorageQuotas::PushUsageUpdate [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldValidateCreate >> test_eval.py::TestEval::test_eval_2_2[v1] [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageRateLimiter::ShouldValidateCreate [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldValidateDelete >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldValidateDelete [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldCreateRateLimiterResource >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckNotExistOldName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScope |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_is_operational_with_distconf [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith9Cpu::test [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldCreateRateLimiterResource [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldDeleteRateLimiterResource >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith26Cpu::test [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldEmptyPageToken [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldValidate >> test_actorsystem.py::TestWithHybridNodeWith38Cpu::test [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] 
|90.7%| [TA] $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD]
>> test_actorsystem.py::TestWithStorageNodeWith35Cpu::test [GOOD]
>> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPublic [GOOD]
>> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivate
>> test_actorsystem.py::TestWithHybridNodeWith34Cpu::test [GOOD]
|90.7%| [TA] {RESULT} $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... results_accumulator.log}
>> TYdbControlPlaneStorageListQueries::ShouldValidate [GOOD]
>> TYdbControlPlaneStorageListQueries::ShouldFilterName
>> TYdbControlPlaneStorageRateLimiter::ShouldDeleteRateLimiterResource [GOOD]
>> TYdbControlPlaneStorageTest::ShouldCreateTable
>> TYdbControlPlaneStorageTest::ShouldCreateTable [GOOD]
>> TYdbControlPlaneStorageWriteResultData::ShouldValidateWrite
|90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith9Cpu::test [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScope [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckModifyTheSame
>> TYdbControlPlaneStorageWriteResultData::ShouldValidateWrite [GOOD]
>> TYdbControlPlaneStorageWriteResultData::ShouldValidateRead
|90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith15Cpu::test [GOOD]
>> TYdbControlPlaneStorageListQueries::ShouldFilterName [GOOD]
>> TYdbControlPlaneStorageListQueries::ShouldFilterByMe
>> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-std] [GOOD]
>> TYdbControlPlaneStorageWriteResultData::ShouldValidateRead [GOOD]
>> TYdbControlPlaneStorageWriteResultData::ShouldSuccess
>> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivate [GOOD]
>> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivatePublic
>> TYdbControlPlaneStorageWriteResultData::ShouldSuccess [GOOD]
>> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v0]
>> TYdbControlPlaneStorageListQueries::ShouldFilterByMe [GOOD]
>> TYdbControlPlaneStorageListQueries::ShouldFilterType
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckModifyTheSame [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckSuperUser
>> test.py::test_run_benchmark[generic-column] [GOOD]
>> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v1]
>> TYdbControlPlaneStorageListQueries::ShouldFilterType [GOOD]
>> TYdbControlPlaneStorageListQueries::ShouldFilterMode
>> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes
>> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD]
>> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD]
>> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionEmpty
>> TYdbControlPlaneStorageDeleteQuery::ShouldSuccess
>> TYdbControlPlaneStorageListQueries::ShouldFilterMode [GOOD]
>> TYdbControlPlaneStorageListQueries::ShouldFilterVisibility
------- [TM]
{default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageWriteResultData::ShouldSuccess [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 10140 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:09 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:06 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? 
S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? 
S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< May04 0:00 [kworker/25:0H-events_highpri] root 170 0 ... riteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys". Create session OK 2025-05-05T03:15:37.239146Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:37.239147Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:37.239279Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2025-05-05T03:15:37.239282Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:37.239283Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:37.239375Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2025-05-05T03:15:37.239385Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:37.239386Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:37.239848Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings". 
Create session OK 2025-05-05T03:15:37.239857Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:37.239859Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:37.240052Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T03:15:37.240064Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:37.240065Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:37.240194Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T03:15:37.240204Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:37.240206Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:37.240313Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/connections". 
Create session OK 2025-05-05T03:15:37.240321Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:37.240322Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:37.252715Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:15:37.252734Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:15:37.278579Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:15:37.278625Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:15:37.287543Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:15:37.287560Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T03:15:37.287958Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:15:37.287969Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:15:37.288009Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:37.288017Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:15:37.288470Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:37.288480Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:15:37.288499Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:37.288502Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:15:37.288597Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:37.288605Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:15:37.288635Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:37.288651Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:15:37.288677Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:15:37.288680Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:15:37.288722Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:15:37.288732Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T03:15:37.288767Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:37.288774Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:15:37.293843Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:37.293857Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T03:15:37.294061Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:15:37.294069Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for 
create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:15:37.294167Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:37.294177Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small": >> TYdbControlPlaneStorageModifyBinding::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckWithoutIdempotencyKey >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-5.test] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[generic-column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. >> TYdbControlPlaneStorageListQueries::ShouldFilterVisibility [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterAutomatic >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageDeleteQuery::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldValidate >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v0] [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v1] >> TYdbControlPlaneStorageDeleteQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckSuperUser >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v1] [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v0] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v0] [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterAutomatic [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionEmpty >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v1] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v1] [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageModifyBinding::ShouldCheckWithoutIdempotencyKey 
[GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionFailed >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-fifo] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionSuccess >> test_actorsystem.py::TestWithStorageNodeWith36Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_eval.py::TestEval::test_eval_2_2[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=904200) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:13:48] send response localhost:10700/?database=local ::1 - - [05/May/2025 03:13:48] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivate >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_action_which_does_not_requere_existing_queue >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldProhibitDeletionOfRunningQuery >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckAllowedSymbolsName >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionSuccess >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckPermission >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-std] >> TYdbControlPlaneStorageDeleteQuery::ShouldProhibitDeletionOfRunningQuery [GOOD] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckExist >> TYdbControlPlaneStorageModifyBinding::ShouldCheckAllowedSymbolsName [GOOD] >> 
TYdbControlPlaneStorageModifyBinding::ShouldCheckExist >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckSimplePipeline |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScopeWithPrivateConnection >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePublic >> TYdbControlPlaneStorageModifyBinding::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckIdempotencyKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_non_optional_field [GOOD] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_action_which_does_not_requere_existing_queue [GOOD] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_that_queue_can_be_created_despite_lack_of_throttling_budget >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePublic [GOOD] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivate >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_that_queue_can_be_created_despite_lack_of_throttling_budget [GOOD] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_throttling_on_nonexistent_queue >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_throttling_on_nonexistent_queue [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckExist [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 10140 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:09 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? 
S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:06 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? 
S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< May04 0:00 [kworker/25:0H-events_highpri] root 170 0 ... 
G: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:44.979830Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2025-05-05T03:15:44.979839Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:44.979841Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:44.979968Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T03:15:44.979976Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:44.979978Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:44.980052Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2025-05-05T03:15:44.980060Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:44.980061Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:44.980349Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2025-05-05T03:15:44.980371Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:44.980372Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:44.980774Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/bindings". 
Create session OK 2025-05-05T03:15:44.980784Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:44.980785Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:44.981190Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T03:15:44.981210Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:44.981211Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:44.988585Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:15:44.988604Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:15:45.005938Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:15:45.005959Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:15:45.017717Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:45.017758Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:15:45.017821Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:45.017843Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:15:45.018029Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:45.018060Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T03:15:45.018113Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:15:45.018122Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:15:45.018187Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:15:45.018216Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:15:45.018253Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:45.018261Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:15:45.018325Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:45.018335Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:15:45.018364Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:15:45.018373Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:15:45.018425Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:45.018434Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:15:45.018440Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:45.018442Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:15:45.018500Z node 17 
:YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:45.018509Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:15:45.018512Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:15:45.018513Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T03:15:45.018566Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:15:45.018574Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T03:15:45.336200Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: DescribeQueryRequest - DescribeQueryResult: {query_id: "abra" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:664: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScopeWithPrivateConnection [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreateScopeeBindingWithUnavailableConnection >> test_actorsystem.py::TestWithHybridNodeWith39Cpu::test >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-30] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivate [GOOD] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivatePublic >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-std] [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckIdempotencyKey [GOOD] |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith34Cpu::test [GOOD] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> TYdbControlPlaneStoragePipeline::ShouldCheckSimplePipeline [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldIncrementGeneration >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreateScopeeBindingWithUnavailableConnection [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateBindingWithUnavailableConnection >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivatePublic [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldSuccess >> TYdbControlPlaneStoragePipeline::ShouldIncrementGeneration [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckStopModifyRun ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageModifyBinding::ShouldCheckIdempotencyKey [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 10140 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:09 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:06 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? 
I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? 
S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< May04 0:00 [kworker/25:0H-events_highpri] root 170 0 ... ualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:15:46.202108Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets". 
Create session OK 2025-05-05T03:15:46.202125Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:15:46.202128Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:15:46.202364Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2025-05-05T03:15:46.202375Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:46.202376Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:46.202426Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2025-05-05T03:15:46.202440Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:46.202442Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:46.203087Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2025-05-05T03:15:46.203100Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:46.203102Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:46.203565Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small". 
Create session OK 2025-05-05T03:15:46.203581Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:46.203583Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:46.204521Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs". Create session OK 2025-05-05T03:15:46.204533Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:15:46.204535Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:15:46.215284Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:15:46.215308Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:15:46.232832Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:15:46.232863Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:15:46.248656Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:15:46.248672Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:15:46.248860Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:46.248881Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:46.248884Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:15:46.248890Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create 
table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:15:46.248955Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:15:46.248965Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T03:15:46.249016Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:46.249018Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:15:46.249071Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:46.249081Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:15:46.249083Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:15:46.249085Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:15:46.249204Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:46.249210Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:15:46.249359Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:46.249381Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T03:15:46.249916Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:46.249923Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:15:46.249950Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:46.249982Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:15:46.250013Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:15:46.250022Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:15:46.250104Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:15:46.250114Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas": >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v0] >> TYdbControlPlaneStorageDescribeBinding::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckPermission >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateBindingWithUnavailableConnection [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateConnectionWithDesctructionBinding >> test_actorsystem.py::TestWithStorageNodeWith36Cpu::test [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckExist >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateConnectionWithDesctructionBinding [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckObjectStorageProjectionByTypes >> InMemoryControlPlaneStorage::ExecuteSimpleStreamQuery >> TYdbControlPlaneStoragePipeline::ShouldCheckStopModifyRun [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckJobMeta >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> TYdbControlPlaneStorageModifyBinding::ShouldCheckObjectStorageProjectionByTypes [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldValidate |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-5.test] [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_action_duration_being_not_immediate >> test_public_api.py::TestBadSession::test_simple [GOOD] >> 
TYdbControlPlaneStorageDescribeBinding::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckSuperUser >> TYdbControlPlaneStoragePipeline::ShouldCheckJobMeta [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckClearFields ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageModifyBinding::ShouldCheckObjectStorageProjectionByTypes [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 10140 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:09 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:06 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? 
S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? 
S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< May04 0:00 [kworker/25:0H-events_highpri] root 170 0 ... session OK 2025-05-05T03:15:52.103691Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:52.103694Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:52.103697Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:52.103698Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:52.103779Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2025-05-05T03:15:52.103800Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:52.103801Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:52.105051Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/result_sets". 
Create session OK 2025-05-05T03:15:52.105068Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:15:52.105079Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:15:52.105860Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2025-05-05T03:15:52.105872Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:52.105874Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:52.109492Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:15:52.109511Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:15:52.130022Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:15:52.130042Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:15:52.147821Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:15:52.147844Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:15:52.147982Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:15:52.147996Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T03:15:52.148191Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:15:52.148201Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:15:52.148225Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:15:52.148242Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:15:52.148257Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:15:52.148258Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T03:15:52.148317Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:15:52.148321Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:15:52.148323Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T03:15:52.148326Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:15:52.148375Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:15:52.148375Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:15:52.148377Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:15:52.148382Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:15:52.148433Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:15:52.148434Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:15:52.148436Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:15:52.148442Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:15:52.148486Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:15:52.148488Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:15:52.148493Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:15:52.148495Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:15:52.563275Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: [yandexcloud://test_folder_id_1, test_user@staff, utbuebkkahufndqdburl] CreateBindingRequest, validation failed: **** (D7BA8005) content { name: "test_binding_name_1" connection_id: "utcuebkkai95c0a29eph" setting { object_storage { subset { path_pattern: "/root/" schema { column { name: "a" type { type_id: BOOL } } } partitioned_by: "a" } } } acl { visibility: PRIVATE } } error:
: Error: Column "a" from projection does not support Bool type, code: 400010 >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[std] >> InMemoryControlPlaneStorage::ExecuteSimpleStreamQuery [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_throttling_on_nonexistent_queue [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> InMemoryControlPlaneStorage::ExecuteSimpleAnalyticsQuery >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionEmpty >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v0] >> TYdbControlPlaneStoragePipeline::ShouldCheckClearFields [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckNodesHealthCheck >> test_actorsystem.py::TestWithHybridNodeWith39Cpu::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_non_optional_field [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=899285) is multi-threaded, use of fork() may lead to deadlocks in the child. 
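The Test command err block above ends with a warning emitted by CPython's own multiprocessing module: forking a process that is already multi-threaded (here pid=899285) can deadlock the child. For reference only, a minimal generic sketch of the usual mitigations follows; it is standard-library Python, not code taken from or proposed for the ydb test harness, and the helper name is hypothetical. Selecting an explicit "forkserver" or "spawn" start method avoids fork()-ing a threaded parent, and starting tracemalloc makes the "Enable tracemalloc" ResourceWarnings that appear next in the log actionable.

    # Hedged sketch (assumption: generic CPython usage, not ydb test code).
    import multiprocessing as mp
    import tracemalloc

    def child_task() -> None:
        # Hypothetical stand-in for whatever work the child process would do.
        print("child started")

    if __name__ == "__main__":
        tracemalloc.start()                  # record allocation tracebacks for unclosed-resource warnings
        ctx = mp.get_context("forkserver")   # or "spawn"; both avoid fork() in a multi-threaded parent
        p = ctx.Process(target=child_task)
        p.start()
        p.join()
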
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v0] [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v1] >> TYdbControlPlaneStoragePipeline::ShouldCheckNodesHealthCheck [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetMeta >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v1] [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v0] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v0] [GOOD] >> test_public_api.py::TestDriverCanRecover::test_driver_recovery >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v1] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v1] [GOOD] >> InMemoryControlPlaneStorage::ExecuteSimpleAnalyticsQuery [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[std] [GOOD] |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_empty_tables_format >> ShouldNotShowPassword::ShouldNotShowPasswordClickHouse >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivate |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith36Cpu::test [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v1] >> ShouldNotShowPassword::ShouldNotShowPasswordClickHouse [GOOD] >> ShouldNotShowPassword::ShouldNotShowPasswordPostgreSQL >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v0] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v1] >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v1] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[queue] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetMeta [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckRemovingOldResultSet >> TYdbControlPlaneStorageModifyConnection::ShouldSuccess >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes >> ShouldNotShowPassword::ShouldNotShowPasswordPostgreSQL [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldSucccess ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, 
use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivatePublic >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-std] >> TYdbControlPlaneStorageControlQuery::ShouldSucccess [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldValidate >> TYdbControlPlaneStorageModifyConnection::ShouldValidate >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-30] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-200] >> TYdbControlPlaneStorageControlQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldCheckIdempotencyKey >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-3.test] >> TYdbControlPlaneStorageModifyConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckSuperUser >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[queue] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[user] >> TYdbControlPlaneStoragePipeline::ShouldCheckRemovingOldResultSet [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckPrioritySelectionEntities >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_empty_tables_format [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_incorrect_tables_format >> TYdbControlPlaneStorageControlQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionFailed |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionSuccess >> TYdbControlPlaneStorageModifyConnection::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckWithoutIdempotencyKey >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith3Cpu::test >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionEmpty >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[user] [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckWithoutIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionSuccess >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-std] [GOOD] >> 
test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-fifo] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v0-std] >> TYdbControlPlaneStoragePipeline::ShouldCheckPrioritySelectionEntities [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetLimit >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePublic >> test_public_api.py::TestDriverCanRecover::test_driver_recovery [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 10140 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:09 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:06 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? 
S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? 
I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< May04 0:00 [kworker/25:0H-events_highpri] root 170 0 ... AGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:16:00.498467Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:16:00.498967Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks". Create session OK 2025-05-05T03:16:00.498976Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:16:00.498978Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:16:00.499108Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small". 
Create session OK 2025-05-05T03:16:00.499118Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:16:00.499119Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:16:00.499221Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2025-05-05T03:16:00.499223Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants". Create session OK 2025-05-05T03:16:00.499225Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:16:00.499226Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:16:00.499241Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:16:00.499242Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:16:00.507781Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:16:00.507813Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:16:00.526732Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:16:00.526753Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:16:00.527008Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:16:00.527017Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:16:00.536918Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:16:00.536935Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:16:00.536999Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:16:00.537012Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:16:00.537173Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:16:00.537183Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:16:00.537198Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:16:00.537208Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T03:16:00.537242Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:16:00.537249Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:16:00.537295Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:16:00.537303Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:16:00.537355Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:16:00.537363Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T03:16:00.537366Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:16:00.537369Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:16:00.537406Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:16:00.537414Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:16:00.537955Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:16:00.537965Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:16:00.537974Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:16:00.537979Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:16:00.538436Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:16:00.538447Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas": |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-std] >> test_queue_counters.py::TestSqsGettingCounters::test_action_duration_being_not_immediate [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_reading_from_empty_queue >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-std] [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldMoveFromScopeToPrivateWithError |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-std] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith3Cpu::test [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetLimit [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckGetResultDataRequest >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_incorrect_tables_format [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_unsupported_tables_format >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_reading_from_empty_queue [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePublic [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivate >> TYdbControlPlaneStorageModifyConnection::ShouldMoveFromScopeToPrivateWithError [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionEmpty |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivate [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivatePublic >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[fifo] >> TYdbControlPlaneStoragePipeline::ShouldCheckGetResultDataRequest [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldRetryQuery >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v0-std] [GOOD] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v1-fifo] |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPublic >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v1-fifo] [GOOD] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v1-std] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v1-std] [GOOD] |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith3Cpu::test [GOOD] |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> 
test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivatePublic [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckLowerCaseName >> TYdbControlPlaneStoragePipeline::ShouldRetryQuery [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckNotAutomaticTtl >> TYdbControlPlaneStorageModifyQuery::ShouldSuccess |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v1] [GOOD] |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckLowerCaseName [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckAllowedSymbolsName |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_unsupported_tables_format [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckNotAutomaticTtl [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckChangeAutomaticTtl >> TYdbControlPlaneStorageModifyQuery::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldModifyRunningQuery |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageCreateBinding::ShouldCheckAllowedSymbolsName [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxCountBindings >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_duplicates >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-200] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-30] >> TYdbControlPlaneStorageCreateBinding::ShouldSucceed >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime-pk_types33-all_types33-index33---] [GOOD] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> TYdbControlPlaneStorageModifyQuery::ShouldModifyRunningQuery [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldValidate >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v1] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint32-pk_types23-all_types23-index23---] [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxCountBindings [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckIdempotencyKey >> TYdbControlPlaneStorageModifyQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckSuperUser >> YdbSdkSessionsPool::StressTestAsync/1 [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-fifo] >> 
TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageCreateBinding::ShouldSucceed [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxLengthName >> TYdbControlPlaneStorageModifyQuery::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckWithoutIdempotencyKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_reading_from_empty_queue [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckChangeAutomaticTtl [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultsTTL >> TYdbControlPlaneStorageCreateBinding::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxLengthName [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMultipleDotsName >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_duplicates [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_reading_deleting |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestAsync/1 [GOOD] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageModifyQuery::ShouldCheckWithoutIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_reading_deleting [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_purge_queue_counters |91.0%| [TA] $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} |91.0%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... 
results_accumulator.log} >> TYdbControlPlaneStoragePipeline::ShouldCheckResultsTTL [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckDisableCurrentIamGetTask >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageGetResult::ShouldSuccess |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime-pk_types33-all_types33-index33---] [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeFailed >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckNotAvailable >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPermission ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageCreateBinding::ShouldCheckIdempotencyKey [GOOD] Test command err: 2025-05-05T03:15:52.231427Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500795104604679116:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:15:52.231466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000e0e/r3tmp/tmphkgrCa/pdisk_1.dat 2025-05-05T03:15:52.299952Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64123, node 1 2025-05-05T03:15:52.313565Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T03:15:52.313579Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T03:15:52.313581Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T03:15:52.313621Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6897 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-05-05T03:15:52.334033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:15:52.334059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:15:52.335122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-05T03:15:52.377340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:15:52.386055Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvNodesHealthCheckRequest 2025-05-05T03:15:52.705256Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvGetTaskRequest 2025-05-05T03:15:52.706451Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvCreateQueryRequest 2025-05-05T03:15:52.706937Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvDescribeQueryRequest Wait query execution 0.000386s: STARTING 2025-05-05T03:15:53.705650Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvGetTaskRequest 2025-05-05T03:15:53.705941Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebkkahq3i7uko7vi Start run actor. Compute state: STARTING 2025-05-05T03:15:53.705970Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvGetTaskRequest 2025-05-05T03:15:53.705990Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebkkahq3i7uko7vi FillConnections 2025-05-05T03:15:53.706017Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebkkahq3i7uko7vi Run actors params: { QueryId: utquebkkahq3i7uko7vi CloudId: mock_cloud UserId: root@builtin Owner: cfdf94dd-83259c60-6f5ef38b-19a6fb382 PreviousQueryRevision: 1 Connections: 0 Bindings: 0 AccountIdSignatures: 0 QueryType: STREAMING ExecuteMode: RUN ResultId: utruebkkagrjkoonqqf5 StateLoadMode: EMPTY StreamingDisposition: { } Status: STARTING DqGraphs: 0 DqGraphIndex: 0 Resource.TopicConsumers: 0 } 2025-05-05T03:15:53.706056Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebkkahq3i7uko7vi Compiling query ... 2025-05-05T03:15:53.706353Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvPingTaskRequest 2025-05-05T03:15:53.706423Z node 1 :FQ_RUN_ACTOR TRACE: QueryId: utquebkkahq3i7uko7vi Forward ping response. Success: 1. Cookie: 2 2025-05-05T03:15:53.706480Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvPingTaskRequest 2025-05-05T03:15:53.706507Z node 1 :FQ_RUN_ACTOR TRACE: QueryId: utquebkkahq3i7uko7vi Forward ping response. Success: 1. 
Cookie: 0 2025-05-05T03:15:53.707332Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvDescribeQueryRequest Wait query execution 1.000794s: RUNNING 2025-05-05T03:15:53.739464Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebkkahq3i7uko7vi Graph (execution) with tasks: 1 2025-05-05T03:15:53.739738Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebkkahq3i7uko7vi Overall dq tasks: 1 2025-05-05T03:15:53.739798Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebkkahq3i7uko7vi Graph 0 2025-05-05T03:15:53.739995Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvPingTaskRequest 2025-05-05T03:15:53.740097Z node 1 :FQ_RUN_ACTOR TRACE: QueryId: utquebkkahq3i7uko7vi Forward ping response. Success: 1. Cookie: 0 2025-05-05T03:15:53.740167Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvPingTaskRequest 2025-05-05T03:15:53.740217Z node 1 :FQ_RUN_ACTOR TRACE: QueryId: utquebkkahq3i7uko7vi Forward ping response. Success: 1. Cookie: 1 2025-05-05T03:15:53.740221Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebkkahq3i7uko7vi Overall dq tasks: 1 2025-05-05T03:15:53.740349Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvPingTaskRequest 2025-05-05T03:15:53.740415Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebkkahq3i7uko7vi Executer: [1:7500795108899647097:2356], Controller: [1:7500795108899647099:2358], ResultIdActor: [1:7500795108899647098:2357] 2025-05-05T03:15:53.740438Z node 1 :FQ_RUN_ACTOR TRACE: QueryId: utquebkkahq3i7uko7vi Forward ping response. Success: 1. Cookie: 0 2025-05-05T03:15:53.740607Z node 1 :YQL_PROXY WARN: SessionId: cfdf94dd-83259c60-6f5ef38b-19a6fb38 2025-05-05 03:15:53.740 WARN ydb-tests-fq-control_plane_storage(pid=1023296, tid=0x00007F21C5BF4640) [DQ] resource_allocator.cpp:259: {utquebkkahq3i7uko7vi/[1:7500795108899647100:2359]} Send TEvAllocateWorkersRequest to Id:6238c62e-4bbe540b-e635880-bf10cf2d,ghrun-4mbpxiiuh4,NodeId:1, 2025-05-05T03:15:53.744498Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvWriteResultDataRequest 2025-05-05T03:15:53.745187Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebkkahq3i7uko7vi Query response SUCCESS. Result set index: 0. Issues count: 0. 
Rows count: 1 2025-05-05T03:15:53.745604Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebkkahq3i7uko7vi Is about to finish query with status COMPLETED 2025-05-05T03:15:53.745615Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebkkahq3i7uko7vi Write finalizing status: COMPLETING 2025-05-05T03:15:53.745773Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvPingTaskRequest 2025-05-05T03:15:53.746105Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvPingTaskRequest 2025-05-05T03:15:54.706101Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvGetTaskRequest 2025-05-05T03:15:54.707779Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvDescribeQueryRequest 2025-05-05T03:15:54.708248Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvGetResultDataRequest 2025-05-05T03:15:54.949820Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500795113384402630:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:15:54.949840Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000e0e/r3tmp/tmpCOwEX2/pdisk_1.dat 2025-05-05T03:15:54.965329Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64964, node 2 2025-05-05T03:15:54.975025Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T03:15:54.975037Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T03:15:54.975039Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T03:15:54.975085Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-05T03:15:55.055317Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:15:55.055359Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:15:55.055729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:15:55.056398Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:15:55.059187Z node 2 :YQ_CONTROL_PLANE_STORAGE INFO: TEvNodesHealthCheckRequest 2025-05-05T03:15:55.396891Z node 2 :YQ_CONTROL_PLANE_STORAGE INFO: TEvGetTaskRequest 2025-05-05T03:15:55.397126Z node 2 :FQ_RUN_ACTOR TRACE: [ydb] [ComputeDatabaseControlPlane]: Scope: yandexcloud://fqrun Single control plane mode has been chosen 2025-05-05T03:15:55.397404Z node 2 :FQ_RUN_ACTOR INFO: [ydb] [SynchronizationService]: Start synchronization for the scope yandexcloud://fqrun 2025-05-05T03:15:55.397886Z node 2 :FQ_RUN_ACTOR INFO: [ydb] [SynchronizationService]: Start fetch connections stage for the scope (single) yandexcloud://fqrun 2025-05-05T03:15:55.397897Z node 2 :FQ_RUN_ACTOR TRACE: [ydb] [SynchronizationService]: Send list connections: scope = yandexcloud://fqrun, page token = 2025-05-05T03:15:55.398035Z node 2 :YQ_CONTROL_PLANE_STORAGE INFO: TEvListConnectionsRequest 2025-05-05T03:15:55.398088Z node 2 :FQ_RUN_ACTOR INFO: [ydb] [SynchronizationService]: Start fetch bindings stage for the scope yandexcloud://fqrun 2025-05-05T03:15:55.398096Z node 2 :FQ_RUN_ACTOR TRACE: [ydb] [SynchronizationService]: Send list bindings: scope = yandexcloud://fqrun, page token = 2025-05-05T03:15:55.398125Z node 2 :YQ_CONTROL_PLANE_STORAGE INFO: TEvListBindingsRequest 2025-05-05T03:15:55.398170Z node 2 :FQ_RUN_ACTOR INFO: [ydb] [SynchronizationService]: Start describe bindings stage for the scope yandexcloud://fqrun 2025-05-05T03:15:55.398178Z node 2 :FQ_RUN_ACTOR INFO: [ydb] [SynchronizationService]: Start create external data sources stage for the scope (bindigns list is empty) yandexcloud://fqrun 2025-05-05T03:15:55.398185Z node 2 :FQ ... ::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:16:12.877786Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2025-05-05T03:16:12.877788Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:16:12.877790Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:16:12.877873Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas". 
Create session OK 2025-05-05T03:16:12.877875Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:16:12.877876Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:16:12.877930Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2025-05-05T03:16:12.877932Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:16:12.877933Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:16:12.877941Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs". Create session OK 2025-05-05T03:16:12.877943Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:16:12.877944Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:16:12.879139Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T03:16:12.879148Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:16:12.879150Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:16:12.879310Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases". 
Create session OK 2025-05-05T03:16:12.879317Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:16:12.879318Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:16:12.902275Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:16:12.902289Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:16:12.924322Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:16:12.924343Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:16:12.929944Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:16:12.929966Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:16:12.930272Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:16:12.930286Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:16:12.942540Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:16:12.942567Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:16:12.942839Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:16:12.942853Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:16:12.943252Z node 17 :YQ_CONTROL_PLANE_STORAGE 
DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:16:12.943263Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:16:12.943349Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:16:12.943357Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:16:12.943408Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:16:12.943411Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T03:16:12.946828Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:16:12.946841Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:16:12.947098Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:16:12.947108Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:16:12.947109Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:16:12.947114Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:16:12.947233Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:16:12.947240Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T03:16:12.947282Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:16:12.947292Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:16:12.947336Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:16:12.947344Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas": >> test_queue_counters.py::TestSqsGettingCounters::test_purge_queue_counters [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckDisableCurrentIamGetTask [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldReturnPartialBatchForGetTask |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeFailed [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckExist |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckNotAvailable [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldValidate |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] >> TYdbControlPlaneStorageGetResult::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageGetResult::ShouldEmpty >> TYdbControlPlaneStorageModifyQuery::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckIdempotencyKey >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v1-fifo] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePublic >> TYdbControlPlaneStoragePipeline::ShouldReturnPartialBatchForGetTask [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> TYdbControlPlaneStorageCreateBinding::ShouldValidate [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldValidateFormatSetting >> TYdbControlPlaneStorageModifyQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionFailed >> 
test_dml.py::TestDML::test_dml[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test.py::test[join-yql-4275-off-ForceBlocks] >> TYdbControlPlaneStorageGetResult::ShouldEmpty [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionEmpty >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_unsupported_tables_format >> test_split_merge.py::TestSplitMerge::test_merge_split[table_UUID-pk_types31-all_types31-index31---] [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePublic [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivate >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionSuccess >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v0] >> TYdbControlPlaneStorageCreateBinding::ShouldValidateFormatSetting [GOOD] >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicSuccess ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test 2025-05-05 03:15:49,817 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-05 03:15:50,173 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 367503 139M 143M 90.4M ydb-tests-olap-data_quotas --basetemp /home/runner/.ya/build/build_root/177e/000e7f/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu 370048 6.3G 6.3G 6.1G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/177e/000e7f/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/chunk0 Test command err: upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert #19 ok, result: [] Quota exceeded False upsert #20 ok, result: [] Quota exceeded False upsert #21 ok, result: [] Quota exceeded False upsert #22 ok, result: [] Quota exceeded False upsert #23 ok, result: [] Quota exceeded False upsert #24 ok, result: [] Quota exceeded False upsert #25 ok, result: [] Quota exceeded False upsert #26 ok, result: [] Quota exceeded False upsert #27 ok, result: [] Quota exceeded False upsert #28 ok, result: [] Quota exceeded False upsert #29 ok, result: [] Quota exceeded False upsert #30 ok, result: [] Quota exceeded False upsert #31 ok, result: [] Quota exceeded False upsert #32 ok, result: [] 
Quota exceeded False upsert #33 ok, result: [] Quota exceeded False upsert #34 ok, result: [] Quota exceeded False upsert #35 ok, result: [] Quota exceeded False upsert #36 ok, result: [] Quota exceeded False upsert #37 ok, result: [] Quota exceeded False upsert #38 ok, result: [] Quota exceeded False upsert #39 ok, result: [] Quota exceeded False upsert #40 ok, result: [] Quota exceeded False upsert #41 ok, result: [] Quota exceeded False upsert #42 ok, result: [] Quota exceeded False upsert #43 ok, result: [] Quota exceeded False upsert #44 ok, result: [] Quota exceeded False upsert #45 ok, result: [] Quota exceeded False upsert #46 ok, result: [] Quota exceeded False upsert #47 ok, result: [] Quota exceeded False upsert #48 ok, result: [] Quota exceeded False upsert #49 ok, result: [] Quota exceeded False upsert #50 ok, result: [] Quota exceeded False upsert #51 ok, result: [] Quota exceeded False upsert #52 ok, result: [] Quota exceeded False upsert #53 ok, result: [] Quota exceeded False upsert #54 ok, result: [] Quota exceeded False upsert #55 ok, result: [] Quota exceeded False upsert #56 ok, result: [] Quota exceeded False upsert #57 ok, result: [] Quota exceeded False upsert #58 ok, result: [] Quota exceeded False upsert #59 ok, result: [] Quota exceeded False upsert #60 ok, result: [] Quota exceeded False upsert #61 ok, result: [] Quota exceeded False upsert #62 ok, result: [] Quota exceeded False upsert #63 ok, result: [] Quota exceeded False upsert #64 ok, result: [] Quota exceeded False upsert #65 ok, result: [] Quota exceeded False upsert #66 ok, result: [] Quota exceeded False upsert #67 ok, result: [] Quota exceeded False upsert #68 ok, result: [] Quota exceeded False upsert #69 ok, result: [] Quota exceeded False upsert #70 ok, result: [] Quota exceeded False upsert #71 ok, result: [] Quota exceeded False upsert #72 ok, result: [] Quota exceeded False upsert #73 ok, result: [] Quota exceeded False upsert #74 ok, result: [] Quota exceeded False upsert #75 ok, result: [] Quota exceeded False upsert #76 ok, result: [] Quota exceeded False upsert #77 ok, result: [] Quota exceeded False upsert #78 ok, result: [] Quota exceeded False upsert #79 ok, result: [] Quota exceeded False upsert #80 ok, result: [] Quota exceeded False upsert #81 ok, result: [] Quota exceeded False upsert #82 ok, result: [] Quota exceeded False upsert #83 ok, result: [] Quota exceeded False upsert #84 ok, result: [] Quota exceeded False upsert #85 ok, result: [] Quota exceeded False upsert #86 ok, result: [] Quota exceeded False upsert #87 ok, result: [] Quota exceeded False upsert #88 ok, result: [] Quota exceeded False upsert #89 ok, result: [] Quota exceeded False upsert #90 ok, result: [] Quota exceeded False upsert #91 ok, result: [] Quota exceeded False upsert #92 ok, result: [] Quota exceeded False upsert #93 ok, result: [] Quota exceeded False upsert #94 ok, result: [] Quota exceeded False upsert #95 ok, result: [] Quota exceeded False upsert #96 ok, result: [] Quota exceeded False upsert #97 ok, result: [] Quota exceeded False upsert #98 ok, result: [] Quota exceeded False upsert #99 ok, result: [] Quota exceeded False upsert #100 ok, result: [] Quota exceeded False upsert #101 ok, result: [] Quota exceeded False upsert #102 ok, result: [] Quota exceeded False upsert #103 ok, result: [] Quota exceeded False upsert #104 ok, result: [] Quota exceeded False upsert #105 ok, result: [] Quota exceeded False upsert #106 ok, result: [] Quota exceeded False upsert #107 ok, result: [] Quota exceeded False 
upsert #108 ok, result: [] Quota exceeded False upsert #109 ok, result: [] Quota exceeded False upsert #110 ok, result: [] Quota exceeded False upsert #111 ok, result: [] Quota exceeded False upsert #112 ok, result: [] Quota exceeded False upsert #113 ok, result: [] Quota exceeded False upsert #114 ok, result: [] Quota exceeded False upsert #115 ok, result: [] Quota exceeded False upsert #116 ok, result: [] Quota exceeded False upsert #117 ok, result: [] Quota exceeded False upsert #118 ok, result: [] Quota exceeded False upsert #119 ok, result: [] Quota exceeded False upsert #120 ok, result: [] Quota exceeded False upsert #121 ok, result: [] Quota exceeded False upsert #122 ok, result: [] Quota exceeded False upsert #123 ok, result: [] Quota exceeded False upsert #124 ok, result: [] Quota exceeded False upsert #125 ok, result: [] Quota exceeded False upsert #126 ok, result: [] Quota exceeded False upsert #127 ok, result: [] Quota exceeded False upsert #128 ok, result: [] Quota exceeded False upsert #129 ok, result: [] Quota exceeded False upsert #130 ok, result: [] Quota exceeded False upsert #131 ok, result: [] Quota exceeded False upsert #132 ok, result: [] Quota exceeded False upsert #133 ok, result: [] Quota exceeded False upsert #134 ok, result: [] Quota exceeded False upsert #135 ok, result: [] Quota exceeded False upsert #136 ok, result: [] Quota exceeded False upsert #137 ok, result: [] Quota exceeded False upsert #138 ok, result: [] Quota exceeded False upsert #139 ok, result: [] Quota exceeded False upsert #140 ok, result: [] Quota exceeded False upsert #141 ok, result: [] Quota exceeded False upsert #142 ok, result: [] Quota exceeded False upsert #143 ok, result: [] Quota exceeded False upsert #144 ok, result: [] Quota exceeded False upsert #145 ok, result: [] Quota exceeded False upsert #146 ok, result: [] Quota exceeded False upsert #147 ok, result: [] Quota exceeded False upsert #148 ok, result: [] Quota exceeded False upsert #149 ok, result: [] Quota exceeded False upsert #150 ok, result: [] Quota exceeded False upsert #151 ok, result: [] Quota exceeded False upsert #152 ok, result: [] Quota exceeded False upsert #153 ok, result: [] Quota exceeded False upsert #154 ok, result: [] Quota exceeded False upsert #155 ok, result: [] Quota exceeded False upsert #156 ok, result: [] Quota exceeded False upsert #157 ok, result: [] Quota exceeded False upsert #158 ok, result: [] Quota exceeded False upsert #159 ok, result: [] Quota exceeded False upsert #160 ok, result: [] Quota exceeded False upsert #161 ok, result: [] Quota exceeded False upsert #162 ok, result: [] Quota exceeded False upsert #163 ok, result: [] Quota exceeded False upsert #164 ok, result: [] Quota exceeded False upsert #165 ok, result: [] Quota exceeded False upsert #166 ok, result: [] Quota exceeded False upsert #167 ok, result: [] Quota exceeded False upsert #168 ok, result: [] Quota exceeded False upsert #169 ok, result: [] Quota exceeded False upsert #170 ok, result: [] Quota exceeded False upsert #171 ok, result: [] Quota exceeded False upsert #172 ok, result: [] Quota exceeded False upsert #173 ok, result: [] Quota exceeded False upsert #174 ok, result: [] Quota exceeded False upsert #175 ok, result: [] Quota exceeded False upsert #176 ok, result: [] Quota exceeded False upsert #177 ok, result: [] Quota exceeded False upsert #178 ok, result: [] Quota exceeded False upsert #179 ok, result: [] Quota exceeded False upsert #180 ok, result: [] Quota exceeded False upsert #181 ok, result: [] Quota exceeded False 
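The long run of "upsert #N ok, result: [] Quota exceeded False" records in this block comes from ydb/tests/olap/data_quotas/test_quota_exhaustion.py; the traceback further down shows the test still inside upsert_until_overload when the 600-second per-chunk timeout fired. Below is a minimal sketch of such a loop with an explicit wall-clock budget, so that a missing quota error fails fast instead of hitting the outer timeout. The helper names upsert_until_overload / upsert_test_chunk and the 600 s limit are taken from the log; the exception handling and everything else are assumptions, not the repository's actual implementation.

    import time

    import ydb


    def upsert_until_overload(do_upsert, budget_seconds=540):
        """Upsert chunks until the database reports quota exhaustion.

        do_upsert(i) stands for something like
        upsert_test_chunk(session, 'huge', i, retries=0) and is assumed to
        raise ydb.issues.Overloaded (or Unavailable) once the data quota is
        exceeded; the budget keeps the loop well under the 600 s chunk timeout.
        """
        started = time.monotonic()
        i = 0
        while True:
            try:
                do_upsert(i)
                print(f"upsert #{i} ok")
            except (ydb.issues.Overloaded, ydb.issues.Unavailable) as err:
                print(f"upsert #{i} rejected, quota exceeded: {err}")
                return i
            if time.monotonic() - started > budget_seconds:
                raise TimeoutError(
                    f"data quota not exhausted after {i + 1} upserts and {budget_seconds} s"
                )
            i += 1

In this run the loop reached at least upsert #183 without the quota being reported as exceeded, so the test-tool timeout was the first limit to trigger.
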
upsert #182 ok, result: [] Quota exceeded False upsert #183 ok, result: [] Quota exceeded False File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File 
"contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/data_quotas/test_quota_exhaustion.py", line 98, in test self.upsert_until_overload(lambda i: self.upsert_test_chunk(session, 'huge', i, retries=0)) File "ydb/tests/olap/data_quotas/test_quota_exhaustion.py", line 78, in upsert_until_overload res = do_upsert(i) File "ydb/tests/olap/data_quotas/test_quota_exhaustion.py", line 98, in self.upsert_until_overload(lambda i: self.upsert_test_chunk(session, 'huge', i, retries=0)) File "ydb/tests/olap/data_quotas/test_quota_exhaustion.py", line 56, in upsert_test_chunk return session.execute_with_retries(f""" File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...s-olap-data_quotas', '--basetemp', '/home/runner/.ya/build/build_root/177e/000e7f/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/177e/000e7f/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/chunk0/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/177e/000e7f', '--source-root', '/home/runner/.ya/build/build_root/177e/000e7f/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/177e/000e7f/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/data_quotas', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', 
'--modulo-index', '0', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/olap/data_quotas', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...s-olap-data_quotas', '--basetemp', '/home/runner/.ya/build/build_root/177e/000e7f/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/177e/000e7f/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/chunk0/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/177e/000e7f', '--source-root', '/home/runner/.ya/build/build_root/177e/000e7f/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/177e/000e7f/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/data_quotas', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '0', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/olap/data_quotas', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] [GOOD] |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_purge_queue_counters [GOOD] ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStoragePipeline::ShouldReturnPartialBatchForGetTask [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 10140 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:10 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:06 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? 
S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? 
S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< May04 0:00 [kworker/25:0H-events_highpri] root 170 0 ... :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:16:16.284407Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2025-05-05T03:16:16.284415Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:16:16.284416Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:16:16.284724Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T03:16:16.284733Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:16:16.284734Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:16:16.284798Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2025-05-05T03:16:16.284812Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:16:16.284815Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:16:16.285253Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/bindings". 
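For orientation, the YQ_CONTROL_PLANE_STORAGE DEBUG records around this point all trace one bootstrap pattern: the control-plane storage creates a per-test working directory and a rate-limiter coordination node, then for every table mentioned in the log ("queries", "pending_small", "connections", "bindings", "nodes", "idempotency_keys", "result_sets", "quotas", ...) it opens a session, issues the create call, and logs the reply. Below is a minimal Python sketch of that create-and-log pattern with an in-memory stub client; all names here (StubStorageClient, bootstrap, the example root path) are hypothetical and this is not the actual NFq/YDB control-plane code, which is C++ and not shown in this log.

    import logging

    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
    log = logging.getLogger("bootstrap_sketch")

    class StubStorageClient:
        """Stand-in for a schema client; it only records what was requested."""
        def __init__(self):
            self.objects = set()

        def create_directory(self, path):
            self.objects.add(("dir", path))

        def create_table(self, path):
            # A real client would talk to the database here; the stub just records the call.
            self.objects.add(("table", path))

    def bootstrap(client, root, tables):
        """Create the working directory and each table, logging the same phases as the DEBUG stream."""
        log.debug('Call create directory "%s"', root)
        client.create_directory(root)
        log.debug('Successfully created directory "%s"', root)
        for name in tables:
            path = f"{root}/{name}"
            log.debug('Create table "%s". Create session OK', path)
            log.debug('Call create table "%s"', path)
            client.create_table(path)
            log.debug('Successfully created table "%s"', path)
            log.debug('Reply for create table "%s":', path)

    if __name__ == "__main__":
        # Hypothetical root path; the real tests derive it from the test case name.
        bootstrap(StubStorageClient(), "local/example_test_case",
                  ["queries", "pending_small", "connections", "bindings",
                   "nodes", "idempotency_keys", "result_sets", "quotas"])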
Create session OK 2025-05-05T03:16:16.285264Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:16:16.285265Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:16:16.285278Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T03:16:16.285281Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:16:16.285283Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:16:16.297352Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:16:16.297368Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:16:16.310378Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:16:16.310397Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:16:16.315413Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:16:16.315437Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:16:16.315480Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:16:16.315486Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:16:16.315663Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:16:16.315672Z 
node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:16:16.326546Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:16:16.326563Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T03:16:16.326679Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:16:16.326691Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T03:16:16.326859Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:16:16.326868Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:16:16.326874Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:16:16.326875Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:16:16.326932Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:16:16.326932Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:16:16.326934Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:16:16.326940Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:16:16.327079Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:16:16.327090Z node 17 
:YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:16:16.327092Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:16:16.327105Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:16:16.327201Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:16:16.327209Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T03:16:16.327230Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:16:16.327239Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:16:17.103395Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: PingTaskRequest (resign): UNAVAILABLE 1 2025-05-05T03:16:17.103366Z 0.000000s 2025-05-05T03:16:17.335729Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: PingTaskRequest (resign): UNAVAILABLE 1 2025-05-05T03:16:17.335708Z 0.000000s 2025-05-05T03:16:17.500777Z node 17 :YQ_CONTROL_PLANE_STORAGE ERROR: Validation: (NYql::TCodeLineException) :0: Error parsing proto message for query. 
Please contact internal support >> test_dml.py::TestDML::test_dml[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckQueryName |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint32-pk_types23-all_types23-index23---] [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivate [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivatePublic >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicSuccess [GOOD] >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicFailed >> test_dml.py::TestDML::test_dml[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] [GOOD] >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicFailed [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckNotAvailable >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-fifo] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPublic >> test_dml.py::TestDML::test_dml[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> test.py::test[join-yql-4275-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-4275-off-Results] [SKIPPED] >> test.py::test[json-jsondocument/select--ForceBlocks] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckNotAvailable [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxCountConnections >> test_ping.py::TestPing::test_error_on_cgi_parameters |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[user] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-fifo] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckQueryName [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckAvailableConnections >> test_dml.py::TestDML::test_dml[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivatePublic [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[std] >> TYdbControlPlaneStorageNodesHealthCheck::ShouldValidate >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v1-std] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-std] >> TYdbControlPlaneStorageNodesHealthCheck::ShouldValidate [GOOD] >> TYdbControlPlaneStoragePingTask::ShouldValidate >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxCountConnections [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckIdempotencyKey >> 
test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-30] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-fifo] >> TYdbControlPlaneStoragePingTask::ShouldValidate [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAbortInTerminatedState >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v1-std] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckLowerCaseName >> TYdbControlPlaneStorageModifyQuery::ShouldCheckAvailableConnections [GOOD] >> test_dml.py::TestDML::test_dml[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckLowerCaseName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxLengthName >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivate >> test_dml.py::TestDML::test_dml[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> test.py::test[json-jsondocument/select--ForceBlocks] [GOOD] >> test.py::test[json-jsondocument/select--Results] >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v0] [GOOD] >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-fifo] [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAbortInTerminatedState [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAst >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-std] |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> test_ping.py::TestPing::test_error_on_cgi_parameters [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-std] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxLengthName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMultipleDotsName >> test_ping.py::TestPing::test_error_on_non_ping_path >> test_ping.py::TestPing::test_error_on_non_ping_path [GOOD] >> test.py::test[json-jsondocument/select--Results] [GOOD] >> test_ping.py::TestPing::test_ping >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-std] [GOOD] >> test.py::test[key_filter-mixed_opt_bounds--ForceBlocks] [SKIPPED] >> test.py::test[key_filter-mixed_opt_bounds--Results] [SKIPPED] >> test.py::test[key_filter-nile_pred--ForceBlocks] >> test_ping.py::TestPing::test_ping 
[GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckAllowedSymbolsName ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageModifyQuery::ShouldCheckAvailableConnections [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 10140 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:10 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:06 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? 
S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? 
I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< May04 0:00 [kworker/25:0H-events_highpri] root 170 0 ... tualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets". Create session OK 2025-05-05T03:16:21.765212Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:16:21.765214Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:16:21.765350Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T03:16:21.765362Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:16:21.765364Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:16:21.765548Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys". Create session OK 2025-05-05T03:16:21.765558Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:16:21.765559Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:16:21.766089Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections". 
Create session OK 2025-05-05T03:16:21.766100Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:16:21.766102Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:16:21.766585Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2025-05-05T03:16:21.766597Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:16:21.766599Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:16:21.766787Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T03:16:21.766795Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:16:21.766797Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:16:21.775691Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:16:21.775707Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:16:21.822953Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:16:21.822973Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:16:21.850013Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:16:21.850031Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:16:21.850111Z node 17 
:YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:16:21.850122Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T03:16:21.850285Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:16:21.850293Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T03:16:21.850356Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:16:21.850365Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:16:21.850381Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:16:21.850384Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:16:21.850410Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:16:21.850412Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T03:16:21.850455Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:16:21.850456Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:16:21.850484Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:16:21.850486Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:16:21.850512Z node 17 
:YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:16:21.850514Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:16:21.850551Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:16:21.850553Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:16:21.850554Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:16:21.850556Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:16:21.850611Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:16:21.850619Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:16:21.854845Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:16:21.854863Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks": >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v0] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v1-std] [GOOD] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v0-fifo] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v0] [SKIPPED] >> TYdbControlPlaneStoragePipeline::ShouldCheckAst [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAstClear >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v1] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v0-fifo] [GOOD] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v0-std] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckAllowedSymbolsName [GOOD] 
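Several test names in this stretch (TYdbControlPlaneStorageCreateConnection::ShouldCheckIdempotencyKey) and the idempotency_keys table created in the DEBUG records refer to idempotency-key deduplication: a retried CreateConnection carrying the same key should return the result of the first attempt instead of creating a second object. The following is a generic Python sketch of that technique with an in-memory store; it illustrates the idea only and is not the YDB control-plane implementation, and every name in it is made up.

    import uuid

    class ConnectionStore:
        """Toy store that dedups create requests by (scope, idempotency_key)."""
        def __init__(self):
            self.connections = {}        # connection_id -> request payload
            self.idempotency_keys = {}   # (scope, key) -> connection_id

        def create_connection(self, scope, request, idempotency_key=None):
            if idempotency_key is not None:
                seen = self.idempotency_keys.get((scope, idempotency_key))
                if seen is not None:
                    # Retried request: hand back the original result, create nothing new.
                    return seen
            connection_id = str(uuid.uuid4())
            self.connections[connection_id] = request
            if idempotency_key is not None:
                self.idempotency_keys[(scope, idempotency_key)] = connection_id
            return connection_id

    if __name__ == "__main__":
        store = ConnectionStore()
        first = store.create_connection("my_scope", {"name": "conn"}, idempotency_key="abc")
        retry = store.create_connection("my_scope", {"name": "conn"}, idempotency_key="abc")
        assert first == retry and len(store.connections) == 1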
>> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionWrite >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v1] [SKIPPED] |91.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] |91.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v1] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v0-std] [GOOD] |91.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v1] [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivatePublic >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_unsupported_tables_format [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-std] >> TYdbControlPlaneStoragePipeline::ShouldCheckAstClear [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAutomaticTtl >> test.py::test[key_filter-nile_pred--ForceBlocks] [GOOD] >> test.py::test[key_filter-nile_pred--Results] >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v0] >> 
test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v0] [SKIPPED] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-std] |91.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] |91.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test.py::test[key_filter-nile_pred--Results] [GOOD] >> test.py::test[key_filter-split_input_with_key_filter2--ForceBlocks] [SKIPPED] >> test.py::test[key_filter-split_input_with_key_filter2--Results] [SKIPPED] >> test.py::test[key_filter-uuid--ForceBlocks] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-fifo] >> TYdbControlPlaneStoragePipeline::ShouldCheckAutomaticTtl [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageGetTask::ShouldValidate >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_unsupported_tables_format [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[fifo] >> test_dml.py::TestDML::test_dml[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[std] >> TYdbControlPlaneStorageGetTask::ShouldValidate [GOOD] >> TYdbControlPlaneStorageGetTask::ShouldWorkWithEmptyPending |91.2%| [TA] $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} |91.2%| [TA] {RESULT} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[std] [GOOD] >> TYdbControlPlaneStorageGetTask::ShouldWorkWithEmptyPending [GOOD] >> TYdbControlPlaneStorageGetTask::ShouldBatchingGetTasks >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionWrite [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionReadWrite |91.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_UUID-pk_types31-all_types31-index31---] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-fifo] [GOOD] >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v0] [SKIPPED] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStoragePipeline::ShouldCheckAutomaticTtl [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 10140 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:10 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:06 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? 
S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? 
S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< May04 0:00 [kworker/25:0H-events_highpri] root 170 0 ... 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:16:28.400108Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:16:28.400257Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2025-05-05T03:16:28.400266Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:16:28.400268Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:16:28.400335Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/idempotency_keys". 
Create session OK 2025-05-05T03:16:28.400343Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:16:28.400344Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:16:28.400401Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/result_sets". Create session OK 2025-05-05T03:16:28.400403Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:16:28.400404Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:16:28.402321Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T03:16:28.402334Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:16:28.402336Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:16:28.402628Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenant_acks". Create session OK 2025-05-05T03:16:28.402632Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:16:28.402634Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:16:28.402743Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/bindings". 
Create session OK 2025-05-05T03:16:28.402744Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:16:28.402745Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:16:28.416521Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:16:28.416541Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:16:28.438368Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:16:28.438390Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:16:28.454353Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:16:28.454376Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:16:28.455235Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:16:28.455249Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:16:28.455483Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:16:28.455492Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:16:28.455599Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:16:28.455606Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:16:28.455700Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:16:28.455708Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:16:28.458283Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:16:28.458297Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:16:28.458347Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:16:28.458358Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T03:16:28.458447Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:16:28.458449Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T03:16:28.458526Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:16:28.458527Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:16:28.458528Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:16:28.458543Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:16:28.458578Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:16:28.458580Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T03:16:28.458609Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/queries" 
2025-05-05T03:16:28.458612Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:16:28.462227Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:16:28.462246Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenants": >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v1] [SKIPPED] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v1] >> TYdbControlPlaneStorageGetTask::ShouldBatchingGetTasks [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldEmptyPageToken >> test_metrics_cleanup.py::TestCleanup::test_keep[v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-fifo] >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success |91.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_ping.py::TestPing::test_ping [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-fifo] >> test_dml.py::TestDML::test_dml[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] |91.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success [GOOD] >> test_dml.py::TestDML::test_dml[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> test.py::test[key_filter-uuid--ForceBlocks] [GOOD] >> test.py::test[key_filter-uuid--Results] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionReadWrite [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-fifo] [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_receive_attempts_are_counted_separately_for_messages_in_one_batch >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp64-pk_types38-all_types38-index38---] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_metrics_cleanup.py::TestCleanup::test_keep[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=902347) is multi-threaded, use of fork() may lead to deadlocks in the child. 
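The DeprecationWarning above ("This process (pid=902347) is multi-threaded, use of fork() may lead to deadlocks in the child") comes from Python's multiprocessing using the fork start method inside an already multi-threaded test process. A minimal sketch of the usual mitigation, switching the start method to spawn; the worker function and pool size are illustrative and not taken from the test code.

import multiprocessing as mp

def check_queue(queue_name):
    # Hypothetical worker; stands in for whatever the test fans out to subprocesses.
    return f"checked {queue_name}"

if __name__ == "__main__":
    # "spawn" starts a clean interpreter instead of fork()ing the multi-threaded
    # parent, which is exactly the deadlock scenario the warning points at.
    ctx = mp.get_context("spawn")
    with ctx.Pool(processes=4) as pool:
        print(pool.map(check_queue, ["q1", "q2", "q3"]))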
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-std] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int8-pk_types21-all_types21-index21---] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v1] [SKIPPED] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-fifo] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v0] [GOOD] >> test.py::test[key_filter-uuid--Results] [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt-ForceBlocks] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v1] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionReadWrite [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 10140 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:10 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< May04 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S May04 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/0] root 15 0.0 0.0 0 0 ? I May04 0:06 [rcu_sched] root 16 0.0 0.0 0 0 ? S May04 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S May04 0:03 [migration/1] root 23 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< May04 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S May04 0:00 [migration/2] root 29 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? 
I< May04 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S May04 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< May04 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S May04 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< May04 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S May04 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< May04 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S May04 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< May04 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S May04 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< May04 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S May04 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< May04 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S May04 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< May04 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S May04 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< May04 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S May04 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< May04 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S May04 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< May04 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S May04 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< May04 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S May04 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< May04 0:00 [kworker/14:0H-kblockd] root 104 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S May04 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< May04 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? 
S May04 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S May04 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< May04 0:00 [kworker/16:0H-kblockd] root 116 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S May04 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< May04 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S May04 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< May04 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S May04 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< May04 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S May04 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< May04 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S May04 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< May04 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S May04 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< May04 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S May04 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< May04 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S May04 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< May04 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S May04 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S May04 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S May04 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S May04 0:00 [ksoftirqd/25] root 169 0.0 0.0 0 0 ? I< May04 0:00 [kworker/25:0H-events_highpri] root 170 0 ... } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:35.001511Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:35.001832Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk99i2i87i20u7] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:35.090725Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:35.091007Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk99fb83o005sf] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:35.179467Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:35.179793Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk99ck68bl9nes] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:35.273308Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:35.273603Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk999tflo1dhc8] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:35.362754Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:35.363036Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk9971sirt01tv] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:35.451941Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:35.452238Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk994afnbeegq2] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:35.474424Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: DB Error, Status: BAD_SESSION, Issues: [ {
: Error: Exceeded maximum allowed number of active transactions, code: 2014 } {
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:861: Too many transactions, current active: 10 MaxTxPerSession: 10 } ], Query: --!syntax_v1 -- Query name: Unknown query name PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateConnection::TTestCaseShouldCheckCommitTransactionReadWrite::Execute_(NUnitTest::TTestContext&)"); DECLARE $idempotency_key as String; DECLARE $scope as String; SELECT `response` FROM `idempotency_keys` WHERE `scope` = $scope AND `idempotency_key` = $idempotency_key; 2025-05-05T03:16:35.569277Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:35.569590Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk991jagsrgugk] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:35.660662Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:35.661038Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk98u0qv3hgg77] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:35.753653Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:35.753965Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk98r7fnsgadu6] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:35.852187Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:35.852522Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk98ocnih4lttv] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:35.942545Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:35.942843Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk98lcc5aabd2e] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:36.038448Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:36.038682Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk98ikavrembmd] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:36.125950Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:36.126277Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk98fmmglodpiv] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:36.215678Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:36.215993Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk98d15nojk3pv] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:36.308717Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:36.308966Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk98a9go2hj3t2] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T03:16:36.386447Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T03:16:36.386741Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebkk987epqtcrloa] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v1] [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldEmptyPageToken [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckLimit >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[fifo] |91.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] >> test_dml.py::TestDML::test_dml[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_receive_attempts_are_counted_separately_for_messages_in_one_batch [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_receive_message_immediate_duration_counter >> test_dml.py::TestDML::test_dml[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] |91.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp64-pk_types38-all_types38-index38---] [GOOD] |91.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v0] [SKIPPED] >> test_dml.py::TestDML::test_dml[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt-Results] |91.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int8-pk_types21-all_types21-index21---] [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt-Results] [GOOD] >> test.py::test[limit-yql-9617_empty_lambda-default.txt-ForceBlocks] |91.3%| [TA] $(B)/ydb/tests/fq/yds/test-results/py3test/{meta.json ... 
results_accumulator.log} |91.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v0] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[std] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[fifo] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[queue] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
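The ShouldCheckCommitTransactionReadWrite block above also reports a DB Error (Status: BAD_SESSION, "Exceeded maximum allowed number of active transactions", "Too many transactions, current active: 10 MaxTxPerSession: 10") raised while running the idempotency_keys lookup. Below is a minimal sketch of running that same parameterized YQL statement from the YDB Python SDK with the transaction committed in the same call, so it does not stay open against the per-session limit; the endpoint, database, and table-path prefix are placeholders, and passing the String parameters as bytes is an assumption.

import ydb

QUERY_TEMPLATE = """
--!syntax_v1
PRAGMA TablePathPrefix("{prefix}");
DECLARE $idempotency_key AS String;
DECLARE $scope AS String;
SELECT `response` FROM `idempotency_keys`
WHERE `scope` = $scope AND `idempotency_key` = $idempotency_key;
"""

def read_response(pool, prefix, scope, idempotency_key):
    def callee(session):
        prepared = session.prepare(QUERY_TEMPLATE.format(prefix=prefix))
        # commit_tx=True finishes the transaction in the same request, so it is
        # never left counting against the session's MaxTxPerSession limit.
        return session.transaction(ydb.SerializableReadWrite()).execute(
            prepared,
            {"$scope": scope, "$idempotency_key": idempotency_key},
            commit_tx=True,
        )
    return pool.retry_operation_sync(callee)

# Placeholder connection settings.
with ydb.Driver(endpoint="grpc://localhost:2136", database="/local") as driver:
    driver.wait(timeout=5)
    with ydb.SessionPool(driver) as pool:
        result_sets = read_response(pool, "local/control_plane_prefix", b"some_scope", b"aba")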
|91.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v1] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |91.4%| [TA] {RESULT} $(B)/ydb/tests/fq/yds/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_dml.py::TestDML::test_dml[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test.py::test[limit-yql-9617_empty_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-yql-9617_empty_lambda-default.txt-Results] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> TYdbControlPlaneStorageListBindings::ShouldCheckLimit [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckScopeVisibility >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-fifo] >> test_dml.py::TestDML::test_dml[table_all_types-pk_types12-all_types12-index12---] >> test_dml.py::TestDML::test_dml[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[std] >> test.py::test[limit-yql-9617_empty_lambda-default.txt-Results] [GOOD] >> test.py::test[lineage-flatten_where-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-flatten_where-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_member_struct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_member_struct-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-remote_tc_with_auto-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[multicluster-remote_tc_with_auto-default.txt-Results] >> test.py::test[multicluster-remote_tc_with_auto-default.txt-Results] [SKIPPED] >> test_public_api.py::TestJsonExample::test_json_unexpected_failure >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-no] |91.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[multicluster-remote_tc_with_auto-default.txt-Results] [SKIPPED] |91.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> test_dml.py::TestDML::test_dml[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[queue] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[user] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
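Two recurring Python DeprecationWarnings in the blocks above state their own fix: requests_client.py still calls logger.warn(), which the logging module deprecates in favor of logger.warning(), and ydb/types.py uses datetime.datetime.utcfromtimestamp(), which CPython is replacing with timezone-aware conversion. A small sketch of both replacements; the logger name and timestamp value are illustrative.

import datetime
import logging

logger = logging.getLogger("requests_client")  # illustrative logger name

# Deprecated spellings flagged in the log:
#   logger.warn("Last request failed ...")
#   datetime.datetime.utcfromtimestamp(ts)
# Current equivalents:
logger.warning("Last request failed with code %s, reason '%s' and text '%s'",
               400, "Bad Request", "...")

ts = 1714867200  # illustrative UNIX timestamp
aware_utc = datetime.datetime.fromtimestamp(ts, datetime.UTC)  # datetime.UTC needs Python 3.11+
# On older interpreters, datetime.timezone.utc is the equivalent constant.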
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-fifo] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1019088) is multi-threaded, use of fork() may lead to deadlocks in the child. ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback 
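The repeated ResourceWarnings in this block ("unclosed ... Enable tracemalloc to get the object allocation traceback") only report that something leaked; to see where the unclosed object was allocated, tracing has to be enabled before the test code runs. A minimal sketch, assuming the test process can either be given an environment variable or start tracing programmatically.

# Option 1: enable tracing at interpreter startup (set in the shell that runs the tests):
#   PYTHONTRACEMALLOC=25 <test command>
# Option 2: start tracing from conftest.py or the test module itself.
import tracemalloc
import warnings

tracemalloc.start(25)                             # keep up to 25 frames per allocation
warnings.simplefilter("always", ResourceWarning)  # make sure the warnings are not swallowed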
ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id)
ResourceWarning: Enable tracemalloc to get the object allocation traceback
(the two lines above repeat identically many times)
contrib/tools/python3/Lib/multiprocessing/pool.py:268: ResourceWarning: unclosed running multiprocessing pool
ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> test_queue_counters.py::TestSqsGettingCounters::test_receive_message_immediate_duration_counter [GOOD]
>> test_queue_counters.py::TestSqsGettingCounters::test_sqs_action_counters
>> test_public_api.py::TestJsonExample::test_json_unexpected_failure [GOOD]
>> test_public_api.py::TestJsonExample::test_json_success
>> test_queue_counters.py::TestSqsGettingCounters::test_sqs_action_counters [GOOD]
>> test_public_api.py::TestJsonExample::test_json_success [GOOD]
>> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v0] [GOOD]
>> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[std]
>> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v0-fifo]
>> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v1]
>>
test_dml.py::TestDML::test_dml[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v1] [GOOD] >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v0] >> test_dml.py::TestDML::test_dml[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v0] [GOOD] >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v1] [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckScopeVisibility [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckPrivateVisibility >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-std] |91.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v0] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-no] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-empty] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-empty] [GOOD] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v0-fifo] [GOOD] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v0-std] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v1] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-invalid] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[std] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-invalid] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-no] >> test_dml.py::TestDML::test_dml[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-no] [GOOD] ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v0] [GOOD] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v0-std] [GOOD] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v1-fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-fifo] >> test_dml.py::TestDML::test_dml[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v0] >> TYdbControlPlaneStorageListBindings::ShouldCheckPrivateVisibility [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckSuperUser >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-fifo] |91.4%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part10/test-results/pytest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). |91.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_sqs_action_counters [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-invalid] |91.5%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part10/test-results/pytest/{meta.json ... 
results_accumulator.log} >> TYdbControlPlaneStorageListBindings::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckFilterByConnectionId >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-fifo] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[fifo] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v0] [GOOD] |91.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-3.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-4.test] >> TYdbControlPlaneStorageListBindings::ShouldCheckFilterByConnectionId [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCombineFilters >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[fifo] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime64-pk_types37-all_types37-index37---] [GOOD] |91.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[std] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-invalid] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
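The DeprecationWarning from contrib/python/ydb/py3/ydb/types.py:59 quoted above already spells out the fix: stop calling datetime.datetime.utcfromtimestamp() and build a timezone-aware value instead. The surrounding types.py code is not visible in this log, so the snippet below is only a minimal sketch of the substitution, with an illustrative timestamp variable:

    import datetime

    timestamp = 1714867200  # illustrative epoch seconds, standing in for the value types.py converts

    # Deprecated since Python 3.12: returns a naive datetime that is implicitly UTC.
    naive_utc = datetime.datetime.utcfromtimestamp(timestamp)

    # Replacement suggested by the warning: an aware datetime with an explicit UTC offset.
    aware_utc = datetime.datetime.fromtimestamp(timestamp, datetime.timezone.utc)
    # On Python 3.11+ this can be spelled datetime.UTC, exactly as the warning text shows.

    # Same wall-clock instant; only the tzinfo differs.
    assert aware_utc.replace(tzinfo=None) == naive_utc

The aware form also compares and subtracts cleanly against other aware datetimes, which is the usual motivation for the change.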
>> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[std] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-no] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-no] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[user] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[fifo] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-std] >> TYdbControlPlaneStorageListBindings::ShouldCombineFilters [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-std] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-fifo] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v1] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup [GOOD] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v1-fifo] |91.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime64-pk_types37-all_types37-index37---] [GOOD] >> test_public_api.py::TestRecursiveCreation::test_mkdir ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageListBindings::ShouldCombineFilters [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167288 10140 ? Ss May04 0:03 /sbin/init root 2 0.0 0.0 0 0 ? S May04 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< May04 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< May04 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< May04 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< May04 0:00 [netns] root 8 0.0 0.0 0 0 ? I< May04 0:00 [kworker/0:0H-events_highpri] root 9 1.2 0.0 0 0 ? I May04 3:10 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? 
I< May04 0:00 [mm_percpu_wq]
(the process listing continues with the per-CPU kernel threads - cpuhp, idle_inject, migration, ksoftirqd and kworker - for CPUs 1 through 25, all showing zero CPU and memory usage; the dump is truncated at this point in the original log) ...
E DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:17:01.480103Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:17:01.480225Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2025-05-05T03:17:01.480233Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:17:01.480234Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:17:01.480308Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T03:17:01.480313Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2025-05-05T03:17:01.480317Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:17:01.480317Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:17:01.480318Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:17:01.480320Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:17:01.481060Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2025-05-05T03:17:01.481073Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:17:01.481074Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:17:01.481084Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings". 
Create session OK 2025-05-05T03:17:01.481087Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:17:01.481088Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:17:01.482169Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T03:17:01.482178Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:17:01.482181Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:17:01.495679Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)" 2025-05-05T03:17:01.495700Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)": 2025-05-05T03:17:01.524821Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T03:17:01.524846Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T03:17:01.531257Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T03:17:01.531287Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T03:17:01.538330Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T03:17:01.538348Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T03:17:01.538615Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T03:17:01.538624Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T03:17:01.538710Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T03:17:01.538777Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T03:17:01.538860Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T03:17:01.538863Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T03:17:01.539071Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T03:17:01.539075Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T03:17:01.539213Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T03:17:01.539215Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T03:17:01.539283Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T03:17:01.539284Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T03:17:01.539325Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T03:17:01.539327Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T03:17:01.539373Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T03:17:01.539375Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T03:17:01.539412Z 
node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T03:17:01.539414Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T03:17:01.539455Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T03:17:01.539456Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T03:17:01.539498Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T03:17:01.539500Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings": >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-std] >> test_dml.py::TestDML::test_dml[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[fifo] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[fifo] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-no] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/python/protobuf/py3/google/protobuf/text_format.py:568: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/protobuf/py3/google/protobuf/text_format.py:568: ResourceWarning: 
unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[std] [GOOD] >> test_public_api.py::TestRecursiveCreation::test_mkdir [GOOD] >> test_public_api.py::TestRecursiveCreation::test_create_table >> test_public_api.py::TestRecursiveCreation::test_create_table [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-std] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v1-fifo] [GOOD] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v1-std] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v1-std] [GOOD] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v0-fifo] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v0] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v0-fifo] [GOOD] |91.6%| [TA] $(B)/ydb/tests/fq/control_plane_storage/test-results/unittest/{meta.json ... results_accumulator.log} >> test_counters.py::TestSqsCountersFeatures::test_disables_user_counters |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-fifo] [GOOD] >> test.py::test[join-join_and_distinct_key-off-Results] [SKIPPED] >> test.py::test[join-join_without_correlation_names--Results] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] |91.6%| [TA] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v0] [SKIPPED] |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v1] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_dml.py::TestDML::test_dml[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v0] |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[std] [GOOD] >> test.py::test[join-join_without_correlation_names--Results] [GOOD] >> test.py::test[join-left_join_null_column--Results] >> test.py::test[blocks-string_filter--Results] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-fifo] >> test_counters.py::TestSqsCountersFeatures::test_disables_user_counters [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v0] >> test.py::test[optimizers-sort_by_nonstrict_const--Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_window--Results] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-std] >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v0] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v1] |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v0] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> test.py::test[blocks-string_filter--Results] [GOOD] >> 
test.py::test[column_group-hint_append2--Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail2--Results] [SKIPPED] >> test.py::test[column_group-publish-single-Results] [SKIPPED] >> test.py::test[column_order-insert_reorder_without_columnorder--Results] |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-fifo] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-std] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-empty] >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v1] [GOOD] >> test.py::test[join-left_join_null_column--Results] [GOOD] >> test.py::test[join-left_null_literal--Results] >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v0] |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[fifo] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v0] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> test_public_api.py::TestAttributes::test_create_table >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-std] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-fifo] >> test_quota_exhaustion.py::TestYdbWorkload::test_delete [GOOD] |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[user] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v0] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-std] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date32-pk_types36-all_types36-index36---] [GOOD] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-std] [GOOD] >> test.py::test[column_order-insert_reorder_without_columnorder--Results] [GOOD] >> test.py::test[count-count_all_grouped-empty-Results] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-std] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-fifo] [GOOD] >> 
test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-std] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v0] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test.py::test[optimizers-unused_columns_window--Results] [GOOD] >> test.py::test[optimizers-yql-7324_duplicate_arg--Results] >> test.py::test[join-left_null_literal--Results] [GOOD] >> test.py::test[join-left_semi_with_other-off-Results] [SKIPPED] >> test.py::test[join-left_trivial-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi--Results] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v0-fifo] >> test_dml.py::TestDML::test_dml[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[join-bush_dis_in_in_in--Results] >> test.py::test[count-count_all_grouped-empty-Results] [GOOD] >> test.py::test[distinct-distinct_by_tuple-default.txt-Results] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-std] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-fifo] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-std] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_delete_message] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-std] [GOOD] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date32-pk_types36-all_types36-index36---] [GOOD] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[std] [GOOD] >> test.py::test[optimizers-yql-7324_duplicate_arg--Results] [GOOD] >> 
test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-Results] >> test_public_api.py::TestAttributes::test_create_table [GOOD] >> test_public_api.py::TestAttributes::test_copy_table >> test_public_api.py::TestAttributes::test_copy_table [GOOD] >> test_public_api.py::TestAttributes::test_create_indexed_table [GOOD] >> test_public_api.py::TestAttributes::test_alter_table ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v1] [SKIPPED] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-empty] [GOOD] >> test_public_api.py::TestAttributes::test_alter_table [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes0] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-invalid] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes0] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes1] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes2] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes3] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes4] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
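The recurring DeprecationWarning pointing at ydb/tests/library/sqs/requests_client.py:140 concerns logging.Logger.warn, which is a deprecated alias of warning. Only one line of that file is quoted in the log, so the variable names below are placeholders; the point is the one-word rename, plus optionally switching to lazy %-formatting so the message is only built when the record is actually emitted:

    import logging

    logger = logging.getLogger("sqs.requests_client")

    # Placeholder values standing in for whatever requests_client.py has in scope.
    code, reason, text = 500, "Internal Server Error", "timeout while polling queue"

    # Before (emits the DeprecationWarning seen above):
    # logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(code, reason, text))

    # After: supported spelling, and formatting is deferred to the logging machinery.
    logger.warning("Last request failed with code %s, reason '%s' and text '%s'", code, reason, text)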
>> test_public_api.py::TestAttributes::test_limits[attributes4] [GOOD] >> test.py::test[join-lookupjoin_semi--Results] [GOOD] >> test.py::test[join-mapjoin_with_anonymous--Results] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-invalid] [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] >> test_dml.py::TestDML::test_dml[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v0-fifo] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v0-std] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v0-std] [GOOD] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-fifo] [GOOD] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v0-fifo] [GOOD] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_dml.py::TestDML::test_dml[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test.py::test[distinct-distinct_by_tuple-default.txt-Results] [GOOD] >> test.py::test[dq-join_cbo_native_3_tables--Results] [SKIPPED] >> test.py::test[dq-precompute_asyncfile--Results] [SKIPPED] >> test.py::test[dq-precompute_parallel_indep--Results] [SKIPPED] >> test.py::test[dq-precompute_tree-default.txt-Results] [SKIPPED] >> test.py::test[expr-double_join_with_list_from_range--Results] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
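The other warning repeated throughout this run, from ydb/tests/library/sqs/requests_client.py:140, is the legacy logging alias ('warn' instead of 'warning'). A minimal sketch of the fix the warning suggests, using an illustrative logger name and placeholder values rather than the real requests_client.py context:

    import logging

    logger = logging.getLogger("sqs.requests_client")  # hypothetical logger name
    code, reason, text = 400, "InvalidParameterValue", "example error body"

    # Deprecated alias that produces "The 'warn' method is deprecated":
    # logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(code, reason, text))

    # Preferred spelling; lazy %-style arguments also skip string formatting
    # when the WARNING level is disabled
    logger.warning("Last request failed with code %s, reason '%s' and text '%s'", code, reason, text)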
|91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v1] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[join-bush_dis_in_in_in--Results] [GOOD] >> test.py::test[join-bush_dis_in_in_in-off-ForceBlocks] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v0-fifo] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-Results] [GOOD] >> test.py::test[order_by-literal_with_assume--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] >> test.py::test[join-mapjoin_with_anonymous--Results] [GOOD] >> test.py::test[join-mergejoin_big_primary-off-Results] >> test.py::test[join-mergejoin_big_primary-off-Results] [SKIPPED] >> test.py::test[join-premap_common_cross--Results] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v1] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-200] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-std] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-std] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. 
Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[fifo] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v1] [GOOD] >> test.py::test[order_by-literal_with_assume--Results] [GOOD] >> test.py::test[order_by-warn_offset_wo_sort--Results] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-empty] >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start >> test.py::test[join-bush_dis_in_in_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_dis_in_in_in-off-Results] [SKIPPED] >> test.py::test[join-emptyjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-emptyjoin_unused_keys--Results] >> test.py::test[join-emptyjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-inner_with_order--ForceBlocks] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v0-std] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_delete_message] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v0-std] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-std] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-fifo] >> test.py::test[join-premap_common_cross--Results] [GOOD] >> test.py::test[join-premap_common_inner_both_sides-off-Results] [SKIPPED] >> test.py::test[join-premap_common_inner_filter--Results] ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[expr-double_join_with_list_from_range--Results] [GOOD] >> test.py::test[file-parse_file_in_select_as_str--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-fifo] [GOOD] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[order_by-warn_offset_wo_sort--Results] [GOOD] >> test.py::test[pg-join_using_tables4-default.txt-Results] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-empty] [GOOD] >> test.py::test[join-inner_with_order--ForceBlocks] [GOOD] >> test.py::test[join-inner_with_order--Results] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-invalid] >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-invalid] [GOOD] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-no] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-no] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-empty] >> ttl_unavailable_s3.py::TestUnavailableS3::test >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-empty] [GOOD] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> unstable_connection.py::TestUnstableConnection::test |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> 
test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] >> test.py::test[file-parse_file_in_select_as_str--Results] [GOOD] >> test.py::test[flatten_by-flatten_expr_struct-default.txt-Results] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-std] [GOOD] >> test.py::test[join-inner_with_order--Results] [GOOD] >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-std] [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted--ForceBlocks] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_delete [GOOD] Test command err: Database name /Root/test upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert: got overload issue delete #0 ok delete #1 ok delete #2 ok delete #3 ok delete #4 ok delete #5 ok delete #6 ok delete #7 ok delete #8 ok delete #9 ok delete #10 ok delete #11 ok delete #12 ok delete #13 ok delete #14 ok delete #15 ok delete #16 ok delete #17 ok delete #18 ok delete #19 ok delete #20 ok delete #21 ok delete #22 ok delete #23 ok delete #24 ok delete #25 ok delete #26 ok delete #27 ok delete #28 ok delete #29 ok delete #30 ok delete #31 ok delete #32 ok delete #33 ok delete #34 ok delete #35 ok delete #36 ok delete #37 ok delete #38 ok delete #39 ok delete #40 ok delete #41 ok delete #42 ok delete #43 ok delete #44 ok delete #45 ok delete #46 ok delete 
#47 ok delete #48 ok delete #49 ok delete #50 ok delete #51 ok delete #52 ok delete #53 ok delete #54 ok delete #55 ok delete #56 ok delete #57 ok delete #58 ok delete #59 ok delete #60 ok delete #61 ok delete #62 ok delete #63 ok delete #64 ok delete #65 ok delete #66 ok delete #67 ok delete #68 ok delete #69 ok delete #70 ok delete #71 ok delete #72 ok delete #73 ok delete #74 ok delete #75 ok delete #76 ok delete #77 ok delete #78 ok delete #79 ok delete #80 ok delete #81 ok delete #82 ok delete #83 ok delete #84 ok delete #85 ok delete #86 ok delete #87 ok delete #88 ok delete #89 ok delete #90 ok delete #91 ok delete #92 ok delete #93 ok delete #94 ok delete #95 ok delete #96 ok delete #97 ok delete #98 ok delete #99 ok delete #100 ok delete #101 ok delete #102 ok delete #103 ok delete #104 ok delete #105 ok delete #106 ok delete #107 ok delete #108 ok delete #109 ok delete #110 ok delete #111 ok delete #112 ok delete #113 ok delete #114 ok delete #115 ok delete #116 ok delete #117 ok delete #118 ok delete #119 ok delete #120 ok delete #121 ok delete #122 ok delete #123 ok delete #124 ok delete #125 ok delete #126 ok delete #127 ok delete #128 ok delete #129 ok delete #130 ok delete #131 ok delete #132 ok delete #133 ok delete #134 ok delete #135 ok delete #136 ok delete #137 ok delete #138 ok delete #139 ok delete #140 ok delete #141 ok delete #142 ok delete #143 ok delete #144 ok delete #145 ok delete #146 ok delete #147 ok delete #148 ok delete #149 ok delete #150 ok delete #151 ok delete #152 ok delete #153 ok delete #154 ok delete #155 ok delete #156 ok delete #157 ok delete #158 ok delete #159 ok delete #160 ok delete #161 ok delete #162 ok delete #163 ok delete #164 ok delete #165 ok delete #166 ok delete #167 ok delete #168 ok delete #169 ok delete #170 ok delete #171 ok delete #172 ok delete #173 ok delete #174 ok delete #175 ok delete #176 ok delete #177 ok delete #178 ok delete #179 ok delete #180 ok delete #181 ok delete #182 ok delete #183 ok delete #184 ok delete #185 ok delete #186 ok delete #187 ok delete #188 ok delete #189 ok delete #190 ok delete #191 ok delete #192 ok delete #193 ok delete #194 ok delete #195 ok delete #196 ok delete #197 ok delete #198 ok delete #199 ok delete #200 ok delete #201 ok delete #202 ok delete #203 ok delete #204 ok delete #205 ok delete #206 ok delete #207 ok delete #208 ok delete #209 ok delete #210 ok delete #211 ok delete #212 ok delete #213 ok delete #214 ok delete #215 ok delete #216 ok delete #217 ok delete #218 ok delete #219 ok delete #220 ok delete #221 ok delete #222 ok delete #223 ok delete #224 ok delete #225 ok delete #226 ok delete #227 ok delete #228 ok delete #229 ok delete #230 ok delete #231 ok delete #232 ok delete #233 ok delete #234 ok delete #235 ok delete #236 ok delete #237 ok delete #238 ok delete #239 ok delete #240 ok delete #241 ok delete #242 ok delete #243 ok delete #244 ok delete #245 ok delete #246 ok delete #247 ok delete #248 ok delete #249 ok delete #250 ok delete #251 ok delete #252 ok delete #253 ok delete #254 ok delete #255 ok delete #256 ok delete #257 ok delete #258 ok delete #259 ok delete #260 ok delete #261 ok delete #262 ok delete #263 ok delete #264 ok delete #265 ok delete #266 ok delete #267 ok delete #268 ok delete #269 ok delete #270 ok delete #271 ok delete #272 ok delete #273 ok delete #274 ok delete #275 ok delete #276 ok delete #277 ok delete #278 ok delete #279 ok delete #280 ok delete #281 ok delete #282 ok delete #283 ok delete #284 ok delete #285 ok delete #286 ok delete #287 ok 
delete #288 ok delete #289 ok delete #290 ok delete #291 ok delete #292 ok delete #293 ok delete #294 ok delete #295 ok delete #296 ok delete #297 ok delete #298 ok delete #299 ok delete #300 ok delete #301 ok delete #302 ok delete #303 ok delete #304 ok delete #305 ok delete #306 ok delete #307 ok delete #308 ok delete #309 ok delete #310 ok delete #311 ok delete #312 ok delete #313 ok delete #314 ok delete #315 ok delete #316 ok delete #317 ok delete #318 ok delete #319 ok delete #320 ok delete #321 ok delete #322 ok delete #323 ok delete #324 ok delete #325 ok delete #326 ok delete #327 ok delete #328 ok delete #329 ok delete #330 ok delete #331 ok delete #332 ok delete #333 ok delete #334 ok delete #335 ok delete #336 ok delete #337 ok delete #338 ok delete #339 ok delete #340 ok delete #341 ok delete #342 ok delete #343 ok delete #344 ok delete #345 ok delete #346 ok delete #347 ok delete #348 ok delete #349 ok delete #350 ok delete #351 ok delete #352 ok delete #353 ok delete #354 ok delete #355 ok delete #356 ok delete #357 ok delete #358 ok delete #359 ok delete #360 ok delete #361 ok delete #362 ok delete #363 ok delete #364 ok delete #365 ok delete #366 ok delete #367 ok delete #368 ok delete #369 ok delete #370 ok delete #371 ok delete #372 ok delete #373 ok delete #374 ok delete #375 ok delete #376 ok delete #377 ok delete #378 ok delete #379 ok delete #380 ok delete #381 ok delete #382 ok delete #383 ok delete #384 ok delete #385 ok delete #386 ok delete #387 ok delete #388 ok delete #389 ok delete #390 ok delete #391 ok delete #392 ok delete #393 ok delete #394 ok delete #395 ok delete #396 ok delete #397 ok delete #398 ok delete #399 ok delete #400 ok delete #401 ok delete #402 ok delete #403 ok delete #404 ok delete #405 ok delete #406 ok delete #407 ok delete #408 ok delete #409 ok delete #410 ok delete #411 ok delete #412 ok delete #413 ok delete #414 ok delete #415 ok delete #416 ok delete #417 ok delete #418 ok delete #419 ok delete #420 ok delete #421 ok delete #422 ok delete #423 ok delete #424 ok delete #425 ok delete #426 ok delete #427 ok delete #428 ok delete #429 ok delete #430 ok delete #431 ok delete #432 ok delete #433 ok delete #434 ok delete #435 ok delete #436 ok delete #437 ok delete #438 ok delete #439 ok delete #440 ok delete #441 ok delete #442 ok delete #443 ok delete #444 ok delete #445 ok delete #446 ok delete #447 ok delete #448 ok delete #449 ok delete #450 ok delete #451 ok delete #452 ok delete #453 ok delete #454 ok delete #455 ok delete #456 ok delete #457 ok delete #458 ok delete #459 ok delete #460 ok delete #461 ok delete #462 ok delete #463 ok delete #464 ok delete #465 ok delete #466 ok delete #467 ok delete #468 ok delete #469 ok delete #470 ok delete #471 ok delete #472 ok delete #473 ok delete #474 ok delete #475 ok delete #476 ok delete #477 ok delete #478 ok delete #479 ok delete #480 ok delete #481 ok delete #482 ok delete #483 ok delete #484 ok delete #485 ok delete #486 ok delete #487 ok delete #488 ok delete #489 ok delete #490 ok delete #491 ok delete #492 ok delete #493 ok delete #494 ok delete #495 ok delete #496 ok delete #497 ok delete #498 ok delete #499 ok >> test.py::test[join-premap_common_inner_filter--Results] [GOOD] >> test.py::test[join-premap_common_multiparents--Results] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-std] [GOOD] >> 
test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_empty_tables_format >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v0] >> test_public_api.py::TestDocApiTables::test_create_table >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-invalid] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/typing.py:395: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/PyYAML/py3/yaml/error.py:6: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/protobuf/py3/google/protobuf/text_format.py:568: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[pg-join_using_tables4-default.txt-Results] [GOOD] >> test.py::test[pg-nulls_native-default.txt-Results] >> test.py::test[flatten_by-flatten_expr_struct-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_one_field--Results] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v0] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> 
test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-std] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[join-join_with_duplicate_keys_on_sorted--ForceBlocks] [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted--Results] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v1] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v1] [GOOD] >> tier_delete.py::TestTierDelete::test_delete_s3_ttl ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/logging/__init__.py:1651: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_empty_tables_format [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--ForceBlocks] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_incorrect_tables_format >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-std] [GOOD] >> test_public_api.py::TestDocApiTables::test_create_table [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] [GOOD] >> 
test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-200] [GOOD] >> test.py::test[flatten_by-flatten_one_field--Results] [GOOD] >> test.py::test[hor_join-max_in_tables--Results] >> test.py::test[pg-nulls_native-default.txt-Results] [GOOD] >> test.py::test[pg-point-default.txt-Results] >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-fifo] [GOOD] >> test.py::test[join-premap_common_multiparents--Results] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap-off-Results] [SKIPPED] >> test.py::test[join-premap_nonseq_flatmap--Results] >> data_correctness.py::TestDataCorrectness::test >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [FAIL] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.2%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v1] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--Results] |92.3%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-std] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-fifo] >> test.py::test[pg-point-default.txt-Results] [GOOD] >> test.py::test[pg-select_from_columns_star-default.txt-Results] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_incorrect_tables_format [GOOD] >> test_drain.py::TestHive::test_drain_on_stop |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[hor_join-max_in_tables--Results] [GOOD] >> test.py::test[hor_join-merge_multiouts_all--Results] [SKIPPED] >> test.py::test[hor_join-sorted_out--Results] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[fifo] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[join-premap_nonseq_flatmap--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--Results] [GOOD] >> test.py::test[join-pullup_context_dep--Results] >> test.py::test[join-mapjoin_opt_vs_2xopt--ForceBlocks] >> test_dml.py::TestDML::test_dml[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[fifo] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_all_types-pk_types12-all_types12-index12---] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[pg-select_from_columns_star-default.txt-Results] [GOOD] >> test.py::test[pg-select_where-default.txt-Results] >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-std] [GOOD] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v1] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-create_table.test] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-abstime.test] |92.4%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-std] [GOOD] >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[fifo] [GOOD] >> test.py::test[hor_join-sorted_out--Results] [GOOD] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[in-in_types_cast_all-default.txt-Results] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[join-mapjoin_opt_vs_2xopt--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt--Results] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-5.test] >> test.py::test[pg-select_where-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q11-default.txt-Results] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date-pk_types32-all_types32-index32---] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[fifo] [GOOD] >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[std] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_dml.py::TestDML::test_dml[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> test.py::test[join-pullup_context_dep--Results] [GOOD] >> test.py::test[join-pullup_left_semi--Results] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-abstime.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-boolean.test] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-empty] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/typing.py:395: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/threading.py:135: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/threading.py:135: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-boolean.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-case.test] >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[std] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt--Results] 
[GOOD] >> test.py::test[join-mergejoin_big_primary_unique--ForceBlocks] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test.py::test[pg-tpcds-q11-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q68-default.txt-Results] >> test.py::test[in-in_types_cast_all-default.txt-Results] [GOOD] >> test.py::test[in-in_with_tuple-default.txt-Results] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join0.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-case.test] [GOOD] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval64-pk_types39-all_types39-index39---] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-create_table.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/coalesce-and-join.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
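Several "Test command err" blocks in this run print "ResourceWarning: unclosed ..." followed by "Enable tracemalloc to get the object allocation traceback". A small, generic sketch of how that traceback can be obtained (not tied to the ydb test harness; the leaked socket is a stand-in for whatever object the tests leave unclosed):

    import gc
    import socket
    import tracemalloc
    import warnings

    # Equivalent to running the tests with PYTHONTRACEMALLOC=1 or `python -X tracemalloc`
    tracemalloc.start()
    warnings.simplefilter("always", ResourceWarning)

    def leak_socket():
        # Deliberately never closed, so the interpreter will report it
        socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    leak_socket()
    # When the leaked object is collected, the ResourceWarning now carries an
    # "Object allocated at ..." traceback instead of the hint seen in the log above
    gc.collect()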
>> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-fifo] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select_distinct.test] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-std] >> test.py::test[pg-tpcds-q68-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q72-default.txt-Results] >> test.py::test[join-mergejoin_big_primary_unique--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique--Results] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [FAIL] Test command err: Database name /Root/test upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert #19 ok, result: [] Quota exceeded False upsert #20 ok, result: [] Quota exceeded False upsert #21 ok, result: [] Quota exceeded False upsert #22 ok, result: [] Quota exceeded False upsert #23 ok, result: [] Quota exceeded False upsert #24 ok, result: [] Quota exceeded False upsert #25 ok, result: [] Quota exceeded False upsert #26 ok, result: [] Quota exceeded False upsert #27 ok, result: [] Quota exceeded False upsert #28 ok, result: [] Quota exceeded False upsert #29 ok, result: [] Quota exceeded False upsert #30 ok, result: [] Quota exceeded False upsert #31 ok, result: [] Quota exceeded False upsert #32 ok, result: [] Quota exceeded False upsert #33 ok, result: [] Quota exceeded False upsert #34 ok, result: [] Quota exceeded False upsert #35 ok, result: [] Quota exceeded False upsert #36 ok, result: [] Quota exceeded False upsert #37 ok, result: [] Quota exceeded False upsert #38 ok, result: [] Quota exceeded False upsert #39 ok, result: [] Quota exceeded False upsert #40 ok, result: [] Quota exceeded False upsert #41 ok, result: [] Quota exceeded False upsert #42 ok, result: [] Quota exceeded False upsert #43 ok, result: [] Quota exceeded False upsert #44 ok, result: [] Quota exceeded False upsert #45 ok, result: [] Quota exceeded False upsert #46 ok, result: [] Quota exceeded False upsert #47 ok, result: [] Quota exceeded False upsert #48 ok, result: [] Quota exceeded False upsert #49 ok, result: [] Quota exceeded False upsert #50 ok, result: [] Quota exceeded False upsert #51 ok, result: [] Quota exceeded False upsert #52 ok, result: [] Quota exceeded False upsert #53 ok, result: [] Quota exceeded False upsert #54 ok, result: [] Quota exceeded False upsert 
#55 ok, result: [] Quota exceeded False upsert #56 ok, result: [] Quota exceeded False upsert #57 ok, result: [] Quota exceeded False upsert #58 ok, result: [] Quota exceeded False upsert #59 ok, result: [] Quota exceeded False upsert #60 ok, result: [] Quota exceeded False upsert #61 ok, result: [] Quota exceeded False upsert #62 ok, result: [] Quota exceeded False upsert #63 ok, result: [] Quota exceeded False upsert #64 ok, result: [] Quota exceeded False upsert #65 ok, result: [] Quota exceeded False upsert #66 ok, result: [] Quota exceeded False upsert #67 ok, result: [] Quota exceeded False upsert #68 ok, result: [] Quota exceeded False upsert #69 ok, result: [] Quota exceeded False upsert #70 ok, result: [] Quota exceeded False upsert #71 ok, result: [] Quota exceeded False upsert #72 ok, result: [] Quota exceeded False upsert #73 ok, result: [] Quota exceeded False upsert #74 ok, result: [] Quota exceeded False upsert #75 ok, result: [] Quota exceeded False upsert #76 ok, result: [] Quota exceeded False upsert #77 ok, result: [] Quota exceeded False upsert #78 ok, result: [] Quota exceeded False >> test.py::test[join-pullup_left_semi--Results] [GOOD] >> test.py::test[join-pullup_null_column--Results] >> test.py::test[in-in_with_tuple-default.txt-Results] [GOOD] >> test.py::test[in-yql-14677-default.txt-Results] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] |92.5%| [TA] $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-fifo] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] >> test.py::test[join-mergejoin_big_primary_unique--Results] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> test.py::test[join-mergejoin_with_table_range--ForceBlocks] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/coalesce-and-join.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join-group-by-with-null.test] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.6%| [TA] {RESULT} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... 
results_accumulator.log} |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_incorrect_tables_format [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |92.6%| [TA] $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... results_accumulator.log} >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-fifo] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_DyNumber-pk_types28-all_types28-index28---] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_all_types-pk_types12-all_types12-index12---] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_postgres.py::TestPGSQL::test_sql_suite[results-abstime.test] |92.6%| [TA] {RESULT} $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test.py::test[pg-tpcds-q72-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q80-default.txt-Results] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-200] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1063390) is multi-threaded, use of fork() may lead to deadlocks in the child. ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object 
allocation traceback ... ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ... >> test_drain.py::TestHive::test_drain_tablets |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[in-yql-14677-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |92.7%| [TM] {default-linux-x86_64, relwithdebinfo}
ydb/tests/functional/sqs/multinode/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join-group-by-with-null.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-4.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-5.test] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[fifo] [GOOD] >> test.py::test[join-mergejoin_with_table_range--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_table_range--Results] >> test.py::test[join-pullup_null_column--Results] [GOOD] >> test.py::test[join-pullup_random-off-Results] [SKIPPED] >> test.py::test[join-split_to_list_as_key--Results] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[in-yql-14677-default.txt-Results] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-abstime.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-boolean.test] >> test_postgres.py::TestPGSQL::test_sql_suite[results-boolean.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-case.test] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[join-mergejoin_with_table_range--Results] [GOOD] >> test.py::test[join-nopushdown_filter_over_inner--ForceBlocks] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-std] [GOOD] >> test.py::test[pg-tpcds-q80-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q87-default.txt-Results] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join3.test] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_users_groups_with_acl.py::test_query_create_group_by_domain_admin[domain_login_only--true-YDB] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-case.test] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/sqs/multinode/py3test >> test_auditlog.py::test_single_dml_query_logged[update] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-std] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal3510-pk_types27-all_types27-index27---] >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test_postgres.py::TestPGSQL::test_sql_suite[results-case.test] [GOOD] >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] >> test_dynamic_tenants.py::test_create_and_drop_tenants[enable_alter_database_create_hive_first--true] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[join-nopushdown_filter_over_inner--ForceBlocks] [GOOD] >> test.py::test[join-nopushdown_filter_over_inner--Results] >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[fifo] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-5.test] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[pg-tpcds-q87-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q91-default.txt-Results] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
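Note on the DeprecationWarning above from contrib/python/ydb/py3/ydb/types.py:59: the interpreter already names the replacement API, so the migration is mechanical. A minimal sketch of the change (the timestamp value below is illustrative, not taken from the test run):

import datetime

timestamp = 1714900000  # hypothetical epoch seconds, for illustration only

# Deprecated since Python 3.12: returns a naive datetime with no tzinfo attached.
naive_utc = datetime.datetime.utcfromtimestamp(timestamp)

# Suggested replacement: an aware datetime pinned to UTC.
# datetime.UTC is an alias of datetime.timezone.utc (Python 3.11+).
aware_utc = datetime.datetime.fromtimestamp(timestamp, datetime.UTC)

assert aware_utc.tzinfo is datetime.timezone.utc
assert naive_utc == aware_utc.replace(tzinfo=None)  # same wall-clock time, now timezone-aware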
|92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[join-split_to_list_as_key--Results] [GOOD] >> test_tenants.py::TestTenants::test_create_drop_create_table[enable_alter_database_create_hive_first--true] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp-pk_types34-all_types34-index34---] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test.py::test[join-nopushdown_filter_over_inner--Results] [GOOD] >> test.py::test[join-nopushdown_filter_over_inner-off-ForceBlocks] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-std] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_change_visibility] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-case.test] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] |92.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[join-split_to_list_as_key--Results] [GOOD] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:534: Enable after interactive tx support >> test_users_groups_with_acl.py::test_query_create_user_by_tenant_admin[domain_login_only--false-YDB] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[pg-tpcds-q91-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q96-default.txt-Results] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join3.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join4.test] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int64-pk_types19-all_types19-index19---] |92.9%| [TA] $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... 
results_accumulator.log} >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_users_groups_with_acl.py::test_query_create_group_by_domain_admin[domain_login_only--true-YDB] [GOOD] >> test_users_groups_with_acl.py::test_query_create_group_by_tenant_admin[domain_login_only--false-YDB] |92.9%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint64-pk_types22-all_types22-index22---] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join-group-by-with-null.test] [GOOD] >> test.py::test[join-nopushdown_filter_over_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-nopushdown_filter_over_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_common_left_cross--ForceBlocks] >> test_tenants.py::TestTenants::test_create_tables[enable_alter_database_create_hive_first--false] >> test.py::test[pg-tpcds-q96-default.txt-Results] [GOOD] >> test.py::test[pg-wide_sort--Results] >> test_tenants.py::TestTenants::test_create_drop_create_table[enable_alter_database_create_hive_first--true] [GOOD] >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[fifo] [GOOD] >> test_user_administration.py::test_database_admin_can_create_user |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-5.test] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-std] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-std] [GOOD] >> test_tenants.py::TestTenants::test_create_remove_database[enable_alter_database_create_hive_first--false] >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_dml.py::TestDML::test_dml[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test.py::test[pg-wide_sort--Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread_fail--Results] [SKIPPED] >> test.py::test[produce-fuse_reduces_diff_sets--Results] >> test.py::test[join-premap_common_left_cross--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_left_cross--Results] >> 
test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] >> test_storage_config.py::TestStorageConfig::test_cases[case_9] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-other-admin] >> test_users_groups_with_acl.py::test_query_create_user_by_tenant_admin[domain_login_only--false-YDB] [GOOD] >> test_users_groups_with_acl.py::test_query_create_user_by_tenant_admin[domain_login_only--true-YDB] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_tenants.py::TestTenants::test_create_tables[enable_alter_database_create_hive_first--false] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/177e/000a9c/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk20/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.update/audit.txt 2025-05-05T03:18:15.490374Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T03:18:15.490361Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-05-05T03:18:15.469776Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test.py::test[join-premap_common_left_cross--Results] [GOOD] |93.0%| [TA] $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} >> test_split_merge.py::TestSplitMerge::test_merge_split[table_String-pk_types29-all_types29-index29---] |93.1%| [TA] {RESULT} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... 
results_accumulator.log} |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_tenants.py::TestTenants::test_create_tables[enable_alter_database_create_hive_first--true] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] |93.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[join-premap_common_left_cross--Results] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith27Cpu::test >> test.py::test[produce-fuse_reduces_diff_sets--Results] [GOOD] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-fifo] [GOOD] >> test_tenants.py::TestTenants::test_when_deactivate_fat_tenant_creation_another_tenant_is_ok[enable_alter_database_create_hive_first--true] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join4.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select.test] >> test_tenants.py::TestTenants::test_create_create_table[enable_alter_database_create_hive_first--true] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[produce-fuse_reduces_diff_sets--Results] [GOOD] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_actorsystem.py::TestWithStorageNodeWith27Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] >> test_users_groups_with_acl.py::test_query_create_group_by_tenant_admin[domain_login_only--false-YDB] [GOOD] >> test_users_groups_with_acl.py::test_query_create_group_by_tenant_admin[domain_login_only--true-YDB] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-5.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-6.test] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] |93.1%| [TA] $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} >> test_tenants.py::TestTenants::test_create_remove_database[enable_alter_database_create_hive_first--false] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_9] [GOOD] >> test_storage_config.py::TestStorageConfig::test_create_tablet >> test_storage_config.py::TestStorageConfig::test_create_tablet [GOOD] |93.1%| [TA] {RESULT} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_tenants.py::TestTenants::test_create_remove_database[enable_alter_database_create_hive_first--true] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1084804) is multi-threaded, use of fork() may lead to deadlocks in the child. ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed 
send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ... contrib/tools/python3/Lib/multiprocessing/pool.py:268: ResourceWarning: unclosed running multiprocessing pool ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_tenants.py::TestTenants::test_create_create_table[enable_alter_database_create_hive_first--true] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith7Cpu::test >> test_storage_config.py::TestStorageConfig::test_cases[case_3] >> test_system_views.py::TestPartitionStats::test_case >> test_tenants.py::TestTenants::test_create_drop_create_table2[enable_alter_database_create_hive_first--false] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] >> test_actorsystem.py::TestWithComputeNodeWith7Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith28Cpu::test
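The blocks of ResourceWarnings above from test_garbage_collection.py report unclosed objects but not where they were created; as the message itself says, enabling tracemalloc makes Python attach the allocation traceback to each warning. A minimal, generic sketch (not part of the YDB test harness; the frame count and the warning filter are illustrative assumptions):

import tracemalloc
import warnings

# Equivalent to running the tests with PYTHONTRACEMALLOC=10 in the environment.
tracemalloc.start(10)  # keep up to 10 stack frames per allocation so the traceback is useful

# Optionally escalate the first leaked resource into an error to pinpoint it early;
# how narrowly to scope this filter is left to the test suite.
warnings.simplefilter("error", ResourceWarning)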
>> test_users_groups_with_acl.py::test_query_create_user_by_tenant_admin[domain_login_only--true-YDB] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_group_by_domain_admin[domain_login_only--false-YDB] >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-fifo] [GOOD] >> test_tenants.py::TestTenants::test_create_tables[enable_alter_database_create_hive_first--true] [GOOD] >> test_user_administration.py::test_database_admin_can_create_user [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[add-subgroup] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[add-subgroup] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[add-user] |93.2%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part2/test-results/pytest/{meta.json ... results_accumulator.log} >> test_tenants.py::TestTenants::test_create_remove_database[enable_alter_database_create_hive_first--true] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[add-user] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-admin-group] >> test_tenants.py::TestTenants::test_force_delete_tenant_when_table_has_been_deleted[enable_alter_database_create_hive_first--false] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-admin-group] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-himself] >> test_system_views.py::TestPartitionStats::test_case [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-himself] [GOOD] >> test_tenants.py::TestTenants::test_create_remove_database_wait[enable_alter_database_create_hive_first--false] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] [FAIL] >> test_dml.py::TestDML::test_dml[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-other-admin] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-subgroup] |93.2%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part13/test-results/pytest/{meta.json ... results_accumulator.log} >> test_actorsystem.py::TestWithStorageNodeWith28Cpu::test [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-subgroup] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[rename-admin-group] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[rename-admin-group] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_user[block] >> test_actorsystem.py::TestWithComputeNodeWith8Cpu::test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] |93.2%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part6/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_user[block] [GOOD] >> test_tenants.py::TestTenants::test_progress_when_tenant_tablets_run_on_dynamic_nodes[enable_alter_database_create_hive_first--false] >> test_user_administration.py::test_database_admin_cant_change_database_admin_user[change-password] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_user_administration.py::test_database_admin_cant_change_database_admin_user[change-password] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_3] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_4] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> test_users_groups_with_acl.py::test_query_create_group_by_tenant_admin[domain_login_only--true-YDB] [GOOD] >> test_users_groups_with_acl.py::test_query_create_user_by_domain_admin[domain_login_only--false-YDB] >> test_actorsystem.py::TestWithComputeNodeWith8Cpu::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_actorsystem.py::TestWithComputeNodeWith9Cpu::test |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.2%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part2/test-results/pytest/{meta.json ... results_accumulator.log} >> test_tenants.py::TestTenants::test_create_drop_create_table2[enable_alter_database_create_hive_first--false] [GOOD] >> test_system_views.py::TestQueryMetrics::test_case |93.2%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_storage_config.py::TestStorageConfig::test_cases[case_4] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_5] >> test_actorsystem.py::TestWithComputeNodeWith9Cpu::test [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select.test] [GOOD] |93.2%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_tenants.py::TestTenants::test_create_drop_create_table2[enable_alter_database_create_hive_first--true] >> test_actorsystem.py::TestWithHybridNodeWith24Cpu::test >> test_users_groups_with_acl.py::test_yql_create_group_by_domain_admin[domain_login_only--false-YDB] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_group_by_domain_admin[domain_login_only--true-YDB] |93.2%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part13/test-results/pytest/{meta.json ... results_accumulator.log} >> test_actorsystem.py::TestWithHybridNodeWith10Cpu::test |93.2%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part6/test-results/pytest/{meta.json ... results_accumulator.log} >> test_tenants.py::TestTenants::test_force_delete_tenant_when_table_has_been_deleted[enable_alter_database_create_hive_first--false] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_5] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_6] >> test_tenants.py::TestTenants::test_create_remove_database_wait[enable_alter_database_create_hive_first--false] [GOOD] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-select.test] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-std] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith24Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith10Cpu::test [GOOD] >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] >> test_tenants.py::TestTenants::test_force_delete_tenant_when_table_has_been_deleted[enable_alter_database_create_hive_first--true] >> test_tenants.py::TestTenants::test_create_remove_database_wait[enable_alter_database_create_hive_first--true] >> test_actorsystem.py::TestWithStorageNodeWith29Cpu::test >> test_auth_system_views.py::test_tenant_auth_groups_access[clusteradmin-True] >> test_dynamic_tenants.py::test_create_and_drop_tenants[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-fifo] [GOOD] >> test_tenants.py::TestTenants::test_create_drop_create_table2[enable_alter_database_create_hive_first--true] [GOOD] >> test_tenants.py::TestTenants::test_progress_when_tenant_tablets_run_on_dynamic_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith11Cpu::test >> test_storage_config.py::TestStorageConfig::test_cases[case_6] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_7] >> test_tenants.py::TestTenants::test_create_drop_create_table3[enable_alter_database_create_hive_first--false] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> test_tenants.py::TestTenants::test_progress_when_tenant_tablets_run_on_dynamic_nodes[enable_alter_database_create_hive_first--true] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] |93.3%| 
[TA] $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |93.3%| [TA] {RESULT} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} >> test_dml.py::TestDML::test_dml[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_group_by_domain_admin[domain_login_only--true-YDB] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_group_by_tenant_admin[domain_login_only--false-YDB] >> test_actorsystem.py::TestWithHybridNodeWith11Cpu::test [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-1.test] >> test_actorsystem.py::TestWithStorageNodeWith29Cpu::test [GOOD] >> test_users_groups_with_acl.py::test_query_create_user_by_domain_admin[domain_login_only--false-YDB] [GOOD] >> test_users_groups_with_acl.py::test_query_create_user_by_domain_admin[domain_login_only--true-YDB] >> test_actorsystem.py::TestWithHybridNodeWith25Cpu::test >> test_tenants.py::TestTenants::test_force_delete_tenant_when_table_has_been_deleted[enable_alter_database_create_hive_first--true] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_7] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_8] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] >> test_actorsystem.py::TestWithHybridNodeWith12Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000d66/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000d66/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback !!! simulating S3 hang up -- sending SIGSTOP !!! 
simulating S3 recovery -- sending SIGCONT contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1092756 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_tenants.py::TestTenants::test_progress_when_tenant_tablets_run_on_dynamic_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith25Cpu::test [GOOD] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_actorsystem.py::TestWithHybridNodeWith12Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith16Cpu::test |93.3%| [TA] $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> test_storage_config.py::TestStorageConfig::test_cases[case_8] [GOOD] >> test_tenants.py::TestTenants::test_register_tenant_and_force_drop_with_table[enable_alter_database_create_hive_first--false] >> test_tenants.py::TestTenants::test_create_remove_database_wait[enable_alter_database_create_hive_first--true] [GOOD] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). |93.3%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_actorsystem.py::TestWithStorageNodeWith2Cpu::test |93.4%| [TA] $(B)/ydb/tests/datashard/dml/test-results/py3test/{meta.json ... results_accumulator.log} |93.4%| [TA] {RESULT} $(B)/ydb/tests/datashard/dml/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_users_groups_with_acl.py::test_yql_create_group_by_tenant_admin[domain_login_only--false-YDB] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith16Cpu::test [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith12Cpu::test [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-create_table.test] |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_create_remove_database_wait[enable_alter_database_create_hive_first--true] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith2Cpu::test [GOOD] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [FAIL] |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_storage_config.py::TestStorageConfig::test_cases[case_8] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] >> test_dynamic_tenants.py::test_custom_coordinator_options[enable_alter_database_create_hive_first--true] >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql[enable_alter_database_create_hive_first--true] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-6.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-7.test] >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] [FAIL] >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] >> test_tenants.py::TestTenants::test_create_drop_create_table3[enable_alter_database_create_hive_first--false] [XFAIL] >> test_actorsystem.py::TestWithComputeNodeWith32Cpu::test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-std] |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-himself] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith26Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith30Cpu::test >> test_tenants.py::TestTenants::test_create_drop_create_table3[enable_alter_database_create_hive_first--true] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-create_table.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/coalesce-and-join.test] |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_user_administration.py::test_database_admin_cant_change_database_admin_user[change-password] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith17Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper 
name='/home/runner/.ya/build/build_root/177e/000c6d/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c6d/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1178850 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/coalesce-and-join.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join-group-by-with-null.test] >> test_users_groups_with_acl.py::test_query_create_user_by_domain_admin[domain_login_only--true-YDB] [GOOD] |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_users_groups_with_acl.py::test_yql_create_group_by_tenant_admin[domain_login_only--false-YDB] [GOOD] >> test_auth_system_views.py::test_tenant_auth_groups_access[clusteradmin-True] [GOOD] >> test_auth_system_views.py::test_tenant_auth_groups_access[clusteruser-False] >> test_actorsystem.py::TestWithHybridNodeWith26Cpu::test [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join-group-by-with-null.test] [GOOD] >> test_auth_system_views.py::test_tenant_auth_groups_access[clusteruser-False] [GOOD] >> test_auth_system_views.py::test_tenant_auth_groups_access[dbadmin-True] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal150-pk_types25-all_types25-index25---] >> test_auth_system_views.py::test_tenant_auth_groups_access[dbadmin-True] [GOOD] >> test_auth_system_views.py::test_tenant_auth_groups_access[ordinaryuser-False] >> test_dynamic_tenants.py::test_custom_coordinator_options[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_complete[enable_alter_database_create_hive_first--false] >> test_actorsystem.py::TestWithComputeNodeWith32Cpu::test [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-insert.test] >> test_actorsystem.py::TestWithComputeNodeWith17Cpu::test [GOOD] >> test_auth_system_views.py::test_tenant_auth_groups_access[ordinaryuser-False] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql_empty_database_header[enable_alter_database_create_hive_first--false] >> test_actorsystem.py::TestWithStorageNodeWith30Cpu::test [GOOD] |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [FAIL] >> test_tenants.py::TestTenants::test_register_tenant_and_force_drop_with_table[enable_alter_database_create_hive_first--false] [GOOD] >> test_tenants.py::TestTenants::test_register_tenant_and_force_drop_with_table[enable_alter_database_create_hive_first--true] >> test_actorsystem.py::TestWithStorageNodeWith21Cpu::test |93.5%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-insert.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-1.test] >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] [FAIL] >> test_dynamic_tenants.py::test_create_tenant_no_cpu[enable_alter_database_create_hive_first--false] |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_users_groups_with_acl.py::test_query_create_user_by_domain_admin[domain_login_only--true-YDB] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith27Cpu::test >> test_user_administration.py::test_database_admin_cant_change_database_admin_user[unblock] >> test_actorsystem.py::TestWithStorageNodeWith21Cpu::test [GOOD] |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join-group-by-with-null.test] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith33Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith18Cpu::test >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql_empty_database_header[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql_empty_database_header[enable_alter_database_create_hive_first--true] |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_tenants.py::TestTenants::test_create_drop_create_table3[enable_alter_database_create_hive_first--true] [XFAIL] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-std] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith18Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith27Cpu::test [GOOD] >> test_tenants.py::TestTenants::test_create_drop_create_table[enable_alter_database_create_hive_first--false] >> test_tenants.py::TestTenants::test_register_tenant_and_force_drop_with_table[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_complete[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_complete[enable_alter_database_create_hive_first--true] >> test_users_groups_with_acl.py::test_yql_create_group_by_tenant_admin[domain_login_only--true-YDB] >> test_actorsystem.py::TestWithComputeNodeWith19Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith31Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith33Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith28Cpu::test |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_register_tenant_and_force_drop_with_table[enable_alter_database_create_hive_first--true] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-2.test] >> test_actorsystem.py::TestWithStorageNodeWith22Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith19Cpu::test [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select_distinct.test] >> 
test_dynamic_tenants.py::test_create_tenant_then_exec_yql_empty_database_header[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_with_cpu[enable_alter_database_create_hive_first--false] >> test_dynamic_tenants.py::test_create_tenant_no_cpu[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_no_cpu[enable_alter_database_create_hive_first--true] >> test_tenants.py::TestTenants::test_create_drop_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith22Cpu::test [GOOD] >> test_users_groups_with_acl.py::test_yql_create_group_by_tenant_admin[domain_login_only--true-YDB] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_user_by_domain_admin[domain_login_only--false-YDB] >> test_actorsystem.py::TestWithComputeNodeWith1Cpu::test >> test_actorsystem.py::TestWithHybridNodeWith28Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith31Cpu::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c58/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c58/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1197935 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_tenants.py::TestTenants::test_when_deactivate_fat_tenant_creation_another_tenant_is_ok[enable_alter_database_create_hive_first--true] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-10.test] >> test_tenants.py::TestTenants::test_yql_operations_over_dynamic_nodes[enable_alter_database_create_hive_first--false] >> test_actorsystem.py::TestWithComputeNodeWith1Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith20Cpu::test |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_create_drop_create_table[enable_alter_database_create_hive_first--false] [GOOD] |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_actorsystem.py::TestWithComputeNodeWith20Cpu::test [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-3.test] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_with_cpu[enable_alter_database_create_hive_first--false] [GOOD] >> 
test_dynamic_tenants.py::test_create_tenant_with_cpu[enable_alter_database_create_hive_first--true] >> test_actorsystem.py::TestWithStorageNodeWith10Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith23Cpu::test >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_complete[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_continue[enable_alter_database_create_hive_first--false] >> test_actorsystem.py::TestWithComputeNodeWith34Cpu::test >> test_actorsystem.py::TestWithHybridNodeWith29Cpu::test >> test_users_groups_with_acl.py::test_yql_create_user_by_domain_admin[domain_login_only--false-YDB] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_user_by_domain_admin[domain_login_only--true-YDB] |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith31Cpu::test [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_user[unblock] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_owner >> test_dynamic_tenants.py::test_create_tenant_no_cpu[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql[enable_alter_database_create_hive_first--false] >> test_actorsystem.py::TestWithStorageNodeWith10Cpu::test [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_owner [GOOD] >> test_user_administration.py::test_user_can_change_password_for_himself[dbadmin] |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-std] [GOOD] |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith20Cpu::test [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-7.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-8.test] >> test_user_administration.py::test_user_can_change_password_for_himself[dbadmin] [GOOD] >> test_user_administration.py::test_user_can_change_password_for_himself[ordinaryuser] >> test_actorsystem.py::TestWithStorageNodeWith23Cpu::test [GOOD] >> test_user_administration.py::test_user_can_change_password_for_himself[ordinaryuser] [GOOD] >> test_tenants.py::TestTenants::test_yql_operations_over_dynamic_nodes[enable_alter_database_create_hive_first--false] [GOOD] |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_storage_config.py::TestStorageConfig::test_cases[case_0] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> unstable_connection.py::TestUnstableConnection::test [GOOD] >> test_create_users.py::test_create_user >> test_tenants.py::TestTenants::test_yql_operations_over_dynamic_nodes[enable_alter_database_create_hive_first--true] >> test_actorsystem.py::TestWithHybridNodeWith29Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith34Cpu::test [GOOD] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] Test command err: 
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c48/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c48/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1211795 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] >> test_actorsystem.py::TestWithStorageNodeWith11Cpu::test >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [GOOD] >> data_correctness.py::TestDataCorrectness::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith24Cpu::test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-fifo] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-3.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-4.test] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith11Cpu::test [GOOD] >> test_tenants.py::TestTenants::test_yql_operations_over_dynamic_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith12Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith35Cpu::test >> test_tenants.py::test_operation_with_locks[enable_alter_database_create_hive_first--false] >> test_create_users.py::test_create_user [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_0] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_10] >> test_dynamic_tenants.py::test_create_tenant_with_cpu[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_custom_coordinator_options[enable_alter_database_create_hive_first--false] >> test_actorsystem.py::TestWithStorageNodeWith24Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith12Cpu::test [GOOD] >> test_users_groups_with_acl.py::test_yql_create_user_by_domain_admin[domain_login_only--true-YDB] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_user_by_tenant_admin[domain_login_only--false-YDB] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_continue[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_continue[enable_alter_database_create_hive_first--true] >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql[enable_alter_database_create_hive_first--false] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper 
name='/home/runner/.ya/build/build_root/177e/000c3e/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c3e/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1229598 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql[enable_alter_database_create_hive_first--false] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith13Cpu::test >> test_storage_config.py::TestStorageConfig::test_cases[case_10] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_11] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith29Cpu::test [GOOD] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_actorsystem.py::TestWithStorageNodeWith13Cpu::test [GOOD] >> test_create_users_strict_acl_checks.py::test_create_user >> test_tenants.py::TestTenants::test_resolve_nodes[enable_alter_database_create_hive_first--false] >> test_actorsystem.py::TestWithComputeNodeWith35Cpu::test [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith25Cpu::test |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_tenants.py::test_operation_with_locks[enable_alter_database_create_hive_first--false] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-4.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-5.test] >> test_system_views.py::TestQueryMetrics::test_case [GOOD] >> test_tenants.py::test_operation_with_locks[enable_alter_database_create_hive_first--true] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> unstable_connection.py::TestUnstableConnection::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000d67/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000d67/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation 
traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1092778 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_storage_config.py::TestStorageConfig::test_cases[case_11] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_12] >> test_actorsystem.py::TestWithStorageNodeWith14Cpu::test >> test_users_groups_with_acl.py::test_yql_create_user_by_tenant_admin[domain_login_only--false-YDB] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_user_by_tenant_admin[domain_login_only--true-YDB] >> test_system_views.py::TestQueryMetricsUniqueQueries::test_case |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-fifo] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith25Cpu::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c28/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c28/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1242470 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_actorsystem.py::TestWithStorageNodeWith14Cpu::test [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval64-pk_types39-all_types39-index39---] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-5.test] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date-pk_types32-all_types32-index32---] [GOOD] >> test_dynamic_tenants.py::test_custom_coordinator_options[enable_alter_database_create_hive_first--false] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_12] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_1] >> test_create_users_strict_acl_checks.py::test_create_user [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith26Cpu::test |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval64-pk_types39-all_types39-index39---] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith15Cpu::test >> 
test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-10.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-11.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c2c/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c2c/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1239863 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-5.test] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith15Cpu::test [GOOD] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date-pk_types32-all_types32-index32---] [GOOD] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_custom_coordinator_options[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_continue[enable_alter_database_create_hive_first--true] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith26Cpu::test [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_1] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_2] >> TFlatTest::Init >> test_split_merge.py::TestSplitMerge::test_merge_split[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> TFlatTest::Mix_DML_DDL >> TFlatTest::Init [GOOD] >> TFlatTest::LargeDatashardReply >> TFlatTest::Mix_DML_DDL [GOOD] >> TFlatTest::OutOfDiskSpace [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith36Cpu::test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] >> test_users_groups_with_acl.py::test_query_create_group_by_domain_admin[domain_login_only--false-YDB] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::OutOfDiskSpace [GOOD] Test command err: 2025-05-05T03:20:13.865480Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796226708713818:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:13.865723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000ce8/r3tmp/tmpP3UYG8/pdisk_1.dat 2025-05-05T03:20:13.928880Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8101 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:13.960658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:13.964629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:14.002616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:14.002673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:14.003664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:14.028915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... proxy error code: Unknown error:
: Error: Resolve failed for table: /dc-1/Table, error: column 'value' not exist, code: 200400 waiting... 2025-05-05T03:20:14.034707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-05-05T03:20:14.042652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:14.049596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... proxy error code: Unknown error:
:5:24: Error: At function: AsList
:5:32: Error: At function: SetResult
:4:27: Error: At function: SelectRow
:4:27: Error: Mismatch of key columns count for table [/dc-1/Table], expected: 2, but got 1., code: 2028 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_correctness.py::TestDataCorrectness::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000d44/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000d44/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1097117 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith15Cpu::test [GOOD] >> test_tenants.py::TestTenants::test_resolve_nodes[enable_alter_database_create_hive_first--false] [GOOD] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_tenants.py::TestTenants::test_resolve_nodes[enable_alter_database_create_hive_first--true] >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_DyNumber-pk_types28-all_types28-index28---] [GOOD] >> TFlatTest::LargeDatashardReply [GOOD] >> test_tenants.py::test_operation_with_locks[enable_alter_database_create_hive_first--true] [GOOD] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReply [GOOD] Test command err: 2025-05-05T03:20:13.745821Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796227261429627:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:13.745838Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003df/r3tmp/tmpVEsGmQ/pdisk_1.dat 2025-05-05T03:20:13.811497Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5885 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-05-05T03:20:13.836161Z node 1 :TX_PROXY DEBUG: actor# [1:7500796227261429870:2102] Handle TEvNavigate describe path dc-1 2025-05-05T03:20:13.836193Z node 1 :TX_PROXY DEBUG: Actor# [1:7500796227261430147:2251] HANDLE EvNavigateScheme dc-1 2025-05-05T03:20:13.836499Z node 1 :TX_PROXY DEBUG: Actor# [1:7500796227261430147:2251] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-05T03:20:13.843506Z node 1 :TX_PROXY DEBUG: Actor# [1:7500796227261430147:2251] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-05-05T03:20:13.844988Z node 1 :TX_PROXY DEBUG: Actor# [1:7500796227261430147:2251] Handle TEvDescribeSchemeResult Forward to# [1:7500796227261430146:2250] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:20:13.848083Z node 1 :TX_PROXY DEBUG: actor# [1:7500796227261429870:2102] Handle TEvProposeTransaction 2025-05-05T03:20:13.848097Z node 1 :TX_PROXY DEBUG: actor# [1:7500796227261429870:2102] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-05-05T03:20:13.886354Z node 1 :TX_PROXY DEBUG: actor# [1:7500796227261429870:2102] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-05T03:20:13.886587Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-05T03:20:13.886603Z node 1 :TX_PROXY DEBUG: actor# [1:7500796227261429870:2102] TxId# 281474976715657 ProcessProposeTransaction 2025-05-05T03:20:13.886634Z node 1 :TX_PROXY DEBUG: actor# [1:7500796227261429870:2102] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7500796227261430164:2260] 2025-05-05T03:20:13.886924Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:7500796227261429858:2099] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-05-05T03:20:13.886958Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-05-05T03:20:13.886993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:13.886997Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-05-05T03:20:13.886999Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-05T03:20:13.887002Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-05-05T03:20:13.887003Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-05T03:20:13.887022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:13.887057Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-05-05T03:20:13.887066Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-05-05T03:20:13.887069Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-05-05T03:20:13.887076Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-05-05T03:20:13.887115Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-05-05T03:20:13.887791Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-05-05T03:20:13.887811Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:7500796227261429858:2099]) 2025-05-05T03:20:13.887827Z node 1 :HIVE 
DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-05-05T03:20:13.887980Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-05-05T03:20:13.887990Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:7500796227261429858:2099])::Execute 2025-05-05T03:20:13.887993Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-05T03:20:13.888000Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:7500796227261429858:2099])::Complete 2025-05-05T03:20:13.888024Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 1746415213747433 ResourceMaximum { Memory: 202797649920 } 2025-05-05T03:20:13.888030Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-05-05T03:20:13.888032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:13.888061Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-05-05T03:20:13.888065Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-05-05T03:20:13.888066Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-05T03:20:13.888095Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-05-05T03:20:13.888097Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-05-05T03:20:13.888099Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-05-05T03:20:13.888101Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-05-05T03:20:13.888821Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-05-05T03:20:13.888832Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-05-05T03:20:13.898109Z node 1 :TX_PROXY DEBUG: Actor# [1:7500796227261430164:2260] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-05-05T03:20:13.898140Z node 1 :TX_PROXY DEBUG: Actor# [1:7500796227261430164:2260] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-05T03:20:13.898158Z node 1 :TX_PROXY DEBUG: Actor# [1:7500796227261430164:2260] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-05T03:20:13.898341Z node 1 :TX_PROXY DEBUG: Actor# [1:7500796227261430164:2260] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-05T03:20:13.898387Z node 1 :TX_PROXY DEBUG: Actor# [1:7500796227261430164:2260] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-05-05T03:20:13.898405Z node 1 :TX_PROXY DEBUG: Actor# [1:7500796227261430164:2260] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-05T03:20:13.898466Z node 1 :TX_PROXY DEBUG: Actor# [1:7500796227261430164:2260] txid# 281474976715657 HANDLE EvClientConnected 2025-05-05T03:20:13.899024Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-05T03:20:13.899091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:13.899169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-05-05T03:20:13.899232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-05T03:20:13.899250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03 ... NKikimr::TEvDataShard::TEvProposeTransactionResult> complete, operationId: 281474976715674:0, at schemeshard: 72057594046644480 2025-05-05T03:20:14.082414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715674:0, at schemeshard: 72057594046644480 2025-05-05T03:20:14.082436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715674 2025-05-05T03:20:14.082446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715674 2025-05-05T03:20:14.082447Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715674 datashard 72075186224037899 state PreOffline 2025-05-05T03:20:14.082450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715674 2025-05-05T03:20:14.082456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715674:0, at schemeshard: 72057594046644480 2025-05-05T03:20:14.082459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715674:0 ProgressState, at schemeshard: 72057594046644480 2025-05-05T03:20:14.082471Z node 1 :TX_DATASHARD DEBUG: 72075186224037899 Got TEvSchemaChangedResult from SS at 72075186224037899 2025-05-05T03:20:14.082518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-05-05T03:20:14.082553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715674:0 progress is 1/1 2025-05-05T03:20:14.082561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715674 ready parts: 1/1 2025-05-05T03:20:14.082563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715674:0 progress is 1/1 2025-05-05T03:20:14.082564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715674 ready parts: 1/1 2025-05-05T03:20:14.082567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715674, ready parts: 1/1, is published: true 2025-05-05T03:20:14.082575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7500796231556398478:2386] message: TxId: 281474976715674 2025-05-05T03:20:14.082578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715674 ready 
parts: 1/1 2025-05-05T03:20:14.082581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715674:0 2025-05-05T03:20:14.082583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715674:0 2025-05-05T03:20:14.082598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 2 2025-05-05T03:20:14.083402Z node 1 :TX_DATASHARD DEBUG: 72075186224037899 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T03:20:14.083423Z node 1 :TX_DATASHARD INFO: 72075186224037899 Initiating switch from PreOffline to Offline state 2025-05-05T03:20:14.083691Z node 1 :TX_DATASHARD INFO: 72075186224037899 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T03:20:14.083796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796231556398049 RawX2: 4503603922340126 } TabletId: 72075186224037899 State: 4 2025-05-05T03:20:14.083831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037899, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:20:14.083933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:12 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:20:14.083940Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037899 state Offline 2025-05-05T03:20:14.083978Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 TxId_Deprecated: 12 TabletID: 72075186224037899 2025-05-05T03:20:14.083990Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037899 2025-05-05T03:20:14.084009Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037899.Leader.1) VolatileState: Running -> Stopped (Node 1) 2025-05-05T03:20:14.084036Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037899.Leader.1 gen 1) to node 1 2025-05-05T03:20:14.084061Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 2025-05-05T03:20:14.084924Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [1:7500796227261429858:2099] NKikimrLocal.TEvStopTablet TabletId: 72075186224037899 FollowerId: 0 Generation: 1,0x10040206 [1:7500796227261430053:2194] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 Actions: NKikimr::TTabletReqBlockBlobStorage} 2025-05-05T03:20:14.084968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12, at schemeshard: 72057594046644480 2025-05-05T03:20:14.085002Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037899 reason = ReasonStop 2025-05-05T03:20:14.085010Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037899 OK) 2025-05-05T03:20:14.085017Z node 1 :HIVE DEBUG: HIVE#72057594037968897 
THive::TTxBlockStorageResult::Complete(72075186224037899 OK) 2025-05-05T03:20:14.085023Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037899 2025-05-05T03:20:14.085045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 1 2025-05-05T03:20:14.085093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T03:20:14.085101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 6], at schemeshard: 72057594046644480 2025-05-05T03:20:14.085102Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037899 2025-05-05T03:20:14.085109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T03:20:14.085128Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037899 2025-05-05T03:20:14.085161Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037899 2025-05-05T03:20:14.085171Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-05-05T03:20:14.085197Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037899 OK) 2025-05-05T03:20:14.085236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:12 2025-05-05T03:20:14.085247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:12 tabletId 72075186224037899 2025-05-05T03:20:14.085257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T03:20:14.085944Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037899)::Complete SideEffects {} 2025-05-05T03:20:14.218434Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796232347380026:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:14.218555Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003df/r3tmp/tmpcEnw74/pdisk_1.dat 2025-05-05T03:20:14.232701Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11331 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:14.322148Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:14.322177Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:14.322622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:14.323127Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:14.326593Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:14.334919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:17.326892Z node 2 :MINIKQL_ENGINE ERROR: Shard %72075186224037888, txid %281474976716360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-05-05T03:20:17.336395Z node 2 :TX_DATASHARD ERROR: Datashard execution error for [0:281474976716360] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-05-05T03:20:17.337042Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976716360 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-05-05T03:20:17.346756Z node 2 :TX_PROXY ERROR: Actor# [2:7500796240937320830:5880] txid# 281474976716360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. 
(71580986 > 50331648) proxy error code: ExecResultUnavailable >> test_storage_config.py::TestStorageConfig::test_cases[case_2] [GOOD] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_create_users_strict_acl_checks.py::test_create_user [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-fifo] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_user_by_tenant_admin[domain_login_only--true-YDB] [GOOD] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |93.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::TestOn3DC::test_create_dirs >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_DyNumber-pk_types28-all_types28-index28---] [GOOD] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith26Cpu::test [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test_users_groups_with_acl.py::test_query_create_group_by_domain_admin[domain_login_only--false-YDB] [GOOD] >> 
test_publish_into_schemeboard_with_common_ssring.py::TestOn3DC::test_create_dirs [GOOD] >> TObjectStorageListingTest::TestFilter >> test_actorsystem.py::TestWithComputeNodeWith36Cpu::test [GOOD] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> TObjectStorageListingTest::TestFilter [GOOD] >> TObjectStorageListingTest::TestSkipShards |93.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> TObjectStorageListingTest::TestSkipShards [GOOD] |93.9%| [TA] $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] >> test.py::test_plans[column] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestSkipShards [GOOD] Test command err: 2025-05-05T03:20:22.406485Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796265512947062:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:22.406508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000cc0/r3tmp/tmpyp3PhT/pdisk_1.dat 2025-05-05T03:20:22.461978Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1596, node 1 2025-05-05T03:20:22.475713Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T03:20:22.475728Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T03:20:22.475730Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T03:20:22.475765Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13971 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:22.509106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:22.509140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:22.510314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:22.539348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:20:22.546914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000cc0/r3tmp/tmpS0sujK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20538, node 2 TClient is connected to server localhost:18688 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... waiting... waiting... 
|93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-fifo] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal3510-pk_types27-all_types27-index27---] [GOOD] |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_storage_config.py::TestStorageConfig::test_cases[case_2] [GOOD] |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_users_groups_with_acl.py::test_yql_create_user_by_tenant_admin[domain_login_only--true-YDB] [GOOD] |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c1c/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c1c/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1262041 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test_run_benchmark[scan-column] |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::test_operation_with_locks[enable_alter_database_create_hive_first--true] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> TLocksTest::Range_IncorrectDot1 >> TFlatTest::ShardUnfreezeNonFrozen >> TObjectStorageListingTest::Listing >> 
test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> TFlatTest::ShardUnfreezeNonFrozen [GOOD] >> TFlatTest::ShardFreezeUnfreezeRejectScheme >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp-pk_types34-all_types34-index34---] [GOOD] >> test.py::test_run_determentistic[column] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] Test command err: 2025-05-05T03:20:27.434060Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796285339576463:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:27.434128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003b7/r3tmp/tmpXAZNN0/pdisk_1.dat 2025-05-05T03:20:27.482093Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16855 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:27.510448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:27.513224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:20:27.535713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:27.535737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:27.536822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:27.573854Z node 1 :TX_PROXY ERROR: Actor# [1:7500796285339577146:2358] txid# 281474976710659, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-05-05T03:20:27.807560Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796287657978541:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:27.807709Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003b7/r3tmp/tmpAlSdcY/pdisk_1.dat 2025-05-05T03:20:27.816811Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:31517 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:27.910985Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:27.911010Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:27.911276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:27.912265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:27.912988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:27.926581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:27.937112Z node 2 :TX_PROXY ERROR: Actor# [2:7500796287657979246:2388] txid# 281474976715660, issues: { message: "Table is frozen. Only unfreeze alter is allowed" severity: 1 } Error 128: Table is frozen. 
Only unfreeze alter is allowed 2025-05-05T03:20:27.937870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:27.943675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... >> test_ttl.py::TestTTLAlterSettings::test_case >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal3510-pk_types27-all_types27-index27---] [GOOD] >> TObjectStorageListingTest::Listing [GOOD] >> TObjectStorageListingTest::ManyDeletes >> TLocksTest::Range_IncorrectDot1 [GOOD] >> TLocksTest::Range_IncorrectDot2 |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_users_groups_with_acl.py::test_query_create_group_by_domain_admin[domain_login_only--false-YDB] [GOOD] |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_publish_into_schemeboard_with_common_ssring.py::TestOn3DC::test_create_dirs [GOOD] |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test_plans[column] [GOOD] |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int64-pk_types19-all_types19-index19---] [GOOD] |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp-pk_types34-all_types34-index34---] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Date-pk_types13-all_types13-index13-Date--] >> TLocksTest::Range_IncorrectDot2 [GOOD] |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] Test command err: 
library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c12/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c12/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1267501 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD] Test command err: 2025-05-05T03:20:27.314806Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796286632242162:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:27.314890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002f8/r3tmp/tmpUrQOcX/pdisk_1.dat 2025-05-05T03:20:27.367760Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28173 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:27.414598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:27.414630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:27.415651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:27.443842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:20:27.453013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:20:27.514933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:27.522982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:27.771341Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796287243032037:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:27.771614Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002f8/r3tmp/tmptCI6NV/pdisk_1.dat 2025-05-05T03:20:27.781482Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:30591 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:27.874815Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:27.874841Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:27.875192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:27.875950Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:27.879726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:27.895007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:27.908919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:20:28.199992Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500796289530114564:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:28.200017Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002f8/r3tmp/tmpHxBSrX/pdisk_1.dat 2025-05-05T03:20:28.218560Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26473 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:28.304992Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:28.305033Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:28.305423Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:28.306004Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:28.306614Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:28.314809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:28.329998Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:28.343884Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:20:28.686673Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500796290909795555:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:28.686869Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002f8/r3tmp/tmp7aR1iU/pdisk_1.dat 2025-05-05T03:20:28.699074Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2083 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:28.790871Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:28.790906Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:28.791349Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:28.791876Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:28.794313Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, u ... PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:20:30.289928Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:30.289982Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:30.290389Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:30.291060Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:30.294725Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:30.307032Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:20:30.317369Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T03:20:30.332615Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002f8/r3tmp/tmppkR6x9/pdisk_1.dat 2025-05-05T03:20:30.693555Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500796301266877234:2220];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:30.717057Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:20:30.717322Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19283 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:20:30.793460Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:30.793502Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:30.793831Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:30.797506Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:30.799131Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:30.807885Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:20:30.825392Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:30.837163Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:31.182908Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500796303959815625:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:31.182936Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002f8/r3tmp/tmpdtNJN5/pdisk_1.dat 2025-05-05T03:20:31.198988Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10299 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:20:31.287873Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:31.287911Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:31.288392Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:31.289451Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:31.296796Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:31.312302Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:31.325584Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:31.670314Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500796302920060419:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:31.670360Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002f8/r3tmp/tmprX2C50/pdisk_1.dat 2025-05-05T03:20:31.683807Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14188 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:20:31.774405Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:31.774436Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:31.774796Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:31.775332Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:31.778633Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:31.786070Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:31.842744Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:31.857861Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> TLocksTest::GoodSameKeyLock ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_plans[column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint64-pk_types22-all_types22-index22---] [GOOD] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-std] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith37Cpu::test |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-fifo] |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large >> test.py::test_run_benchmark[scan-row] >> TLocksTest::GoodSameKeyLock [GOOD] >> TLocksTest::GoodSameShardLock |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c05/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000c05/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1272947 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint64-pk_types22-all_types22-index22---] [GOOD] |94.3%| [TA] $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-8.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-9.test] |94.3%| [TA] {RESULT} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> TLocksTest::Range_BrokenLock2 |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> TFlatTest::SelectRangeNullArgs3 >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state >> TLocksTest::GoodSameShardLock [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] >> TFlatTest::SelectRangeNullArgs3 [GOOD] >> TFlatTest::SelectRangeNullArgs4 >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-std] >> TFlatTest::SelectRangeNullArgs4 [GOOD] |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::GoodSameShardLock [GOOD] Test command err: 2025-05-05T03:20:33.060110Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796312325276761:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:33.060128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c89/r3tmp/tmp3OyiXe/pdisk_1.dat 2025-05-05T03:20:33.113707Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13938 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:33.160641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:33.160677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:33.161708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-05T03:20:33.191476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:33.200275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:33.223329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:33.235952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:33.547289Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796314991899325:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:33.547310Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c89/r3tmp/tmpfJwUvG/pdisk_1.dat 2025-05-05T03:20:33.563765Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7339 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:33.653198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:33.653235Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:33.653706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:33.654337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:33.658777Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:20:33.671494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:33.690211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:33.703408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:34.029802Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500796316895583836:2217];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c89/r3tmp/tmpelC2OZ/pdisk_1.dat 2025-05-05T03:20:34.036923Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:20:34.046761Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2890 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:34.134597Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:34.134624Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:34.134973Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:34.136327Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:34.146620Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:34.157934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:20:34.169335Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:20:34.185947Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c89/r3tmp/tmpGHnLay/pdisk_1.dat 2025-05-05T03:20:34.552792Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:20:34.552989Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:2698 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:34.640375Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:34.640398Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:34.640724Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:34.641367Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:34.646519Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at ... ersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:36.462579Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:36.462608Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:36.463002Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:20:36.466531Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:36.466767Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:36.480892Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:20:36.483255Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:20:36.506718Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T03:20:36.529656Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c89/r3tmp/tmpWlhIaN/pdisk_1.dat 2025-05-05T03:20:36.942316Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:20:36.954804Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2372 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:37.030539Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:37.030573Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:37.030921Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:37.031395Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:37.038167Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:20:37.046437Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:20:37.047724Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:37.082383Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:37.107810Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:37.443043Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500796331668440674:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:37.443710Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c89/r3tmp/tmpe8XttV/pdisk_1.dat 2025-05-05T03:20:37.463954Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17977 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:20:37.552101Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:37.552134Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:37.552516Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:37.553113Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:37.561392Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:20:37.576435Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:20:37.591133Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:20:37.940641Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500796332120291756:2207];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:37.942274Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c89/r3tmp/tmp2CW8y9/pdisk_1.dat 2025-05-05T03:20:37.957653Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:30094 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:38.045132Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:38.045174Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:38.045551Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:38.046172Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:38.048138Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:38.050396Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:20:38.051605Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:20:38.067404Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:20:38.085373Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeNullArgs4 [GOOD] Test command err: 2025-05-05T03:20:38.223788Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796333533927875:2079];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:38.224010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c79/r3tmp/tmpoXZOyD/pdisk_1.dat 2025-05-05T03:20:38.302945Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19932 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:38.368391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:38.368417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:38.369095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:38.371805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:38.377015Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T03:20:38.382581Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:20:38.384012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:20:38.754290Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796334983927870:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:38.754308Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c79/r3tmp/tmpzxpq6M/pdisk_1.dat 2025-05-05T03:20:38.779300Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17543 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:38.868594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:38.868627Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:38.868965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:38.870947Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:38.874772Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:38.884721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
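The ResourceWarning messages reported earlier for the s3 suite (unclosed <_io.TextIOWrapper ...> handles for moto_server.out.log / moto_server.err.log left behind by subprocess.Popen) usually mean the recipe opens the child's log files and never closes them. A hedged sketch of the usual cleanup pattern, with an assumed command name and caller-supplied paths rather than the recipe's actual code:

    import contextlib
    import subprocess

    def start_moto_server(out_path, err_path):
        # Own the log file handles via an ExitStack so they get closed even if
        # Popen raises; this avoids the unclosed-file ResourceWarnings above.
        with contextlib.ExitStack() as stack:
            out = stack.enter_context(open(out_path, "w", encoding="utf-8"))
            err = stack.enter_context(open(err_path, "w", encoding="utf-8"))
            proc = subprocess.Popen(["moto_server"], stdout=out, stderr=err)
            # pop_all() transfers ownership of the open files to the caller,
            # who closes them after stopping the process in the teardown step.
            return proc, stack.pop_all()

The caller would then run proc.terminate(); proc.wait(); resources.close() on teardown, which also clears the "subprocess ... is still running" warning.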
>> TLocksTest::Range_BrokenLock2 [GOOD] >> TLocksTest::Range_BrokenLock3 >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_all_types-pk_types7-all_types7-index7---] |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> TLocksTest::Range_BrokenLock3 [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_String-pk_types29-all_types29-index29---] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock3 [GOOD] Test command err: 2025-05-05T03:20:37.935970Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796330062393729:2269];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:37.936051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003ad/r3tmp/tmpFxjcOc/pdisk_1.dat 2025-05-05T03:20:38.016396Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21175 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: 2025-05-05T03:20:38.037042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:38.037066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:38.038125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:38.077177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:38.082847Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:38.085744Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:38.086904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:20:38.116690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:20:38.128689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:20:38.398532Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796336417993041:2080];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:38.403718Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003ad/r3tmp/tmpivmbDF/pdisk_1.dat 2025-05-05T03:20:38.427867Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12824 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:38.499363Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:38.499386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:38.499711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:38.500945Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:38.502287Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:38.506130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:38.520384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:38.535199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:38.871794Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500796333393582219:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:38.871871Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003ad/r3tmp/tmpdQgGak/pdisk_1.dat 2025-05-05T03:20:38.885051Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1848 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:38.976161Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:38.976186Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:38.976555Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:38.978318Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:38.978763Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:38.984414Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:20:38.985532Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:39.000023Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:39.022590Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:39.403570Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003ad/r3tmp/tmpYUK5ut/pdisk_1.dat 2025-05-05T03:20:39.430463Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21244 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:39.503013Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:39.503043Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:2 ... 1615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:41.118546Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:41.118575Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:41.118908Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:41.120150Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:41.124551Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:41.138642Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:20:41.155078Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:41.174519Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-05-05T03:20:41.191074Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T03:20:41.211513Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:20:41.635918Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500796346272382000:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:41.636597Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003ad/r3tmp/tmpiKwxj9/pdisk_1.dat 2025-05-05T03:20:41.664104Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15726 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:41.742709Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:41.742735Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:41.743124Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:41.746833Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:20:41.748364Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:41.759064Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:41.785615Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:41.800476Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003ad/r3tmp/tmpsyl9zn/pdisk_1.dat 2025-05-05T03:20:42.201272Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:20:42.222437Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22987 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:42.299060Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:42.299094Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:42.299473Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:42.300015Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:42.306772Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:42.314846Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:20:42.319877Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:42.335060Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:42.347180Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:20:42.731544Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500796353252306836:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:42.731565Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0003ad/r3tmp/tmp2hPmmo/pdisk_1.dat 2025-05-05T03:20:42.746412Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5085 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:20:42.834985Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:42.835016Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:42.835750Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:42.837140Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:20:42.837705Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:42.846839Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:42.869563Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:42.883315Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
|94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> TFlatTest::WriteSplitByPartialKeyAndRead |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int64-pk_types19-all_types19-index19---] [GOOD] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_actorsystem.py::TestWithComputeNodeWith37Cpu::test [GOOD] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> TFlatTest::WriteSplitByPartialKeyAndRead [GOOD] >> TFlatTest::WriteSplitAndReadFromFollower |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] >> test.py::test_run_determentistic[row] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint32 |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] Test command err: 2025-05-05T03:20:44.407382Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796358670783300:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:44.407720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c70/r3tmp/tmpOoHLSL/pdisk_1.dat 2025-05-05T03:20:44.479924Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13726 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:20:44.550507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:44.550534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:44.551280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:44.552820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:44.554313Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:44.579032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746415244679 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Key2" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Va... 
(TRUNCATED) 2025-05-05T03:20:44.663355Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:20:44.663687Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-05T03:20:44.663699Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-05T03:20:44.674981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } } } TxId: 281474976715668 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-05T03:20:44.675038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976715668:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } waiting... 2025-05-05T03:20:44.675121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T03:20:44.675132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T03:20:44.675179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-05T03:20:44.675217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976715668:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2025-05-05T03:20:44.675222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715668:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-05T03:20:44.675470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715668, response: Status: StatusAccepted TxId: 281474976715668 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-05-05T03:20:44.675484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715668, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-05-05T03:20:44.675519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:0, at schemeshard: 72057594046644480 2025-05-05T03:20:44.675525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715668:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-05-05T03:20:44.675585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 
TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T03:20:44.675607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T03:20:44.675693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:2 msg type: 268697601 2025-05-05T03:20:44.675728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-05-05T03:20:44.675738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715668, partId: 0, tablet: 72057594037968897 2025-05-05T03:20:44.675743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976715668, shardIdx: 72057594046644480:2, partId: 0 2025-05-05T03:20:44.675745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976715668, shardIdx: 72057594046644480:3, partId: 0 2025-05-05T03:20:44.676030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715668, at schemeshard: 72057594046644480 2025-05-05T03:20:44.676044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 0/1, is published: true 2025-05-05T03:20:44.676048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715668, at schemeshard: 72057594046644480 2025-05-05T03:20:44.679492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-05-05T03:20:44.679512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976715668, shardIdx: 72057594046644480:2, partId: 0 2025-05-05T03:20:44.679550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-05-05T03:20:44.679560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-05-05T03:20:44.679578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-05-05T03:20:44.679659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-05-05T03:20:44.679662Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976715668, shardIdx: 72057594046644480:3, partId: 0 2025-05-05T03:20:44.679671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-05-05T03:20:44.679673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-05-05T03:20:44.679677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-05-05T03:20:44.679684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for tx ... rdStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:20:45.595964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:20:45.595983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:20:45.596007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-05T03:20:45.596056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-05-05T03:20:45.596096Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-05-05T03:20:45.596103Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-05-05T03:20:45.596104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-05T03:20:45.596242Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-05-05T03:20:45.596507Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-05-05T03:20:45.596510Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-05-05T03:20:45.596722Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-05-05T03:20:45.596779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:20:45.596795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:20:45.596800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:20:45.596805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:20:45.596318Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-05-05T03:20:45.596351Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-05-05T03:20:45.596589Z 
node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-05-05T03:20:45.596619Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-05-05T03:20:45.596880Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-05T03:20:45.596886Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-05T03:20:45.596888Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-05-05T03:20:45.596891Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-05-05T03:20:45.600933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-05T03:20:45.600945Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-05-05T03:20:45.600953Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-05-05T03:20:45.600986Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-05-05T03:20:45.601001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-05T03:20:45.601053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-05T03:20:45.601071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T03:20:45.601087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T03:20:45.601103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T03:20:45.601117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T03:20:45.601132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-05-05T03:20:45.601148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T03:20:45.601164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-05-05T03:20:45.601166Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T03:20:45.601177Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T03:20:45.601179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T03:20:45.601189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T03:20:45.601212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-05-05T03:20:45.601217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-05T03:20:45.601228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-05-05T03:20:45.601266Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-05-05T03:20:45.601267Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-05T03:20:45.601270Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-05T03:20:45.601273Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-05T03:20:45.601275Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-05-05T03:20:45.601277Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-05T03:20:45.601381Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-05T03:20:45.601394Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7500796363422822031:2706], serverId# [2:7500796363422822032:2707], sessionId# [0:0:0] 2025-05-05T03:20:45.601442Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-05-05T03:20:45.601462Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-05-05T03:20:45.601493Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-05T03:20:45.601615Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-05-05T03:20:45.601632Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-05-05T03:20:45.602007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T03:20:45.602012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-05T03:20:45.602021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-05T03:20:45.602023Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-05T03:20:45.602026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T03:20:45.602028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T03:20:45.602031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T03:20:45.602034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-05-05T03:20:45.602037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-05-05T03:20:45.602040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-05-05T03:20:45.602133Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-05-05T03:20:45.602136Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus 
from node 3, TabletId: 72075186224037889 not found 2025-05-05T03:20:45.602139Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-05-05T03:20:45.602141Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-05-05T03:20:45.602157Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T03:20:45.602326Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-05T03:20:45.602343Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7500796363422822584:3121], serverId# [3:7500796363336519406:2451], sessionId# [0:0:0] 2025-05-05T03:20:45.602379Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T03:20:45.602393Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-05-05T03:20:45.602619Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-05-05T03:20:45.602624Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:7500796363422822586:3123], serverId# [3:7500796363336519407:2452], sessionId# [0:0:0] 2025-05-05T03:20:45.602635Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-05-05T03:20:45.602856Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-05-05T03:20:45.603050Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T03:20:45.603255Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-05-05T03:20:45.603424Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-05-05T03:20:45.603436Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-05-05T03:20:45.602565Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-05-05T03:20:45.602760Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |94.5%| [TA] $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |94.5%| [TA] {RESULT} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-11.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-12.test] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint32 [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint64_and_string >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint64_and_string [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_list |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> TFlatTest::ShardFreezeUnfreezeAlreadySet >> test_ydb_sql.py::TestExecuteSqlWithParams::test_list [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_struct |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_ydb_sql.py::TestExecuteSqlWithParams::test_struct [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-row] >> TLocksTest::UpdateLockedKey >> TFlatTest::ShardFreezeUnfreezeAlreadySet [GOOD] >> TFlatTest::ShardFreezeUnfreeze >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [GOOD] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering >> TFlatTest::ShardFreezeUnfreeze [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] >> test_tenants.py::TestTenants::test_resolve_nodes[enable_alter_database_create_hive_first--true] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:20:49.708677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:20:49.708714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:20:49.708719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:20:49.708724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:20:49.708736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:20:49.708741Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:20:49.708751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:20:49.708768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:20:49.708865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:20:49.708960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:20:49.736375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:20:49.736406Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:20:49.753791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:20:49.761789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:20:49.761868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:20:49.772167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:20:49.772258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:20:49.772372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:20:49.772649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:20:49.773512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:20:49.773845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:20:49.773860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:20:49.773884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:20:49.773893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:20:49.773899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:20:49.773953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:20:49.779768Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:20:49.805771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:20:49.805864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:20:49.805946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:20:49.806031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:20:49.806046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:20:49.807898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:20:49.807940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:20:49.808025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:20:49.808052Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:20:49.808058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:20:49.808064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:20:49.808758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:20:49.808775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:20:49.808781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:20:49.809223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:20:49.809236Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:20:49.809242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:20:49.809250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:20:49.810008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:20:49.810485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:20:49.810536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:20:49.810753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:20:49.810786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { 
TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:20:49.810812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:20:49.810878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:20:49.810885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:20:49.810919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:20:49.810932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:20:49.811696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:20:49.811706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:20:49.811758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:20:49.811765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:20:49.811843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:20:49.811851Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:20:49.811864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:20:49.811868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:20:49.811874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:20:49.811877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:20:49.811882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:20:49.811887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:20:49.811892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:20:49.811895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:20:49.811908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:20:49.811914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:20:49.811918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:20:49.812283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:20:49.812302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... : 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 REQUEST: PUT /data_01.csv HTTP/1.1 HEADERS: Host: localhost:26856 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7E8DFF6C-0544-4FEC-84DF-DB9CDA8F332B amz-sdk-request: attempt=1 content-length: 11 content-md5: jsMhyzH+cyrvZpBm0dQVGQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv / / 11 2025-05-05T03:20:50.004538Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2431], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-05-05T03:20:50.004569Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:478:2434], result# PutObjectResult { ETag: 8ec321cb31fe732aef669066d1d41519 } 2025-05-05T03:20:50.004576Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:478:2434], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T03:20:50.004712Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:472:2430] 2025-05-05T03:20:50.004731Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:473:2431], sender# [1:472:2430], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-05-05T03:20:50.004752Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:477:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:26856 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F3781375-A817-454A-A20A-556948988FCC amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-05-05T03:20:50.010617Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2431], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-05-05T03:20:50.010649Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:473:2431], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T03:20:50.010830Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:472:2430], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T03:20:50.012596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:20:50.012619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-05T03:20:50.012709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-05-05T03:20:50.012716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-05-05T03:20:50.012879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:20:50.012890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:20:50.013249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T03:20:50.013266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T03:20:50.013270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-05T03:20:50.013276Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T03:20:50.013283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-05T03:20:50.013301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 2025-05-05T03:20:50.026922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-05T03:20:50.049809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:20:50.049849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-05-05T03:20:50.049887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:20:50.049903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:20:50.049924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T03:20:50.049998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, 
domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:20:50.050177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:20:50.050184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T03:20:50.050219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:20:50.050229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:20:50.050236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T03:20:50.050242Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:20:50.050247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T03:20:50.050252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-05T03:20:50.050258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T03:20:50.050278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:20:50.056033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:20:50.056274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:20:50.056428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T03:20:50.056444Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T03:20:50.056467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T03:20:50.056471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:20:50.056477Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T03:20:50.056480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:20:50.056486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T03:20:50.056514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:370:2336] message: TxId: 102 2025-05-05T03:20:50.056523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T03:20:50.056529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T03:20:50.056535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T03:20:50.056567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-05T03:20:50.057584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:20:50.057601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:452:2411] TestWaitNotification: OK eventTxId 102 |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] >> TLocksTest::CK_GoodLock >> TFlatTest::CopyCopiedTableAndRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreeze [GOOD] Test command err: 2025-05-05T03:20:49.820175Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796383433175046:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:49.820234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000271/r3tmp/tmpmp1gBj/pdisk_1.dat 2025-05-05T03:20:49.889134Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:20:49.921102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:49.921134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:49.922288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3278 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:49.967929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:49.977853Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:20:49.979182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:50.009592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:50.024818Z node 1 :TX_PROXY ERROR: Actor# [1:7500796387728142915:2385] txid# 281474976715660, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-05-05T03:20:50.025414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:50.029886Z node 1 :TX_PROXY ERROR: Actor# [1:7500796387728142955:2419] txid# 281474976715662, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000271/r3tmp/tmptF7JlE/pdisk_1.dat 2025-05-05T03:20:50.338334Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:20:50.343994Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21787 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:50.427030Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:50.427058Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:50.427482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:50.428006Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:50.434387Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:20:50.442823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:50.474933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:20:50.489494Z node 2 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715660: 2025-05-05T03:20:50.490697Z node 2 :TX_PROXY ERROR: Actor# [2:7500796386371682167:2393] txid# 281474976715660 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-05T03:20:50.490733Z node 2 :TX_PROXY ERROR: Actor# [2:7500796386371682167:2393] txid# 281474976715660 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-05T03:20:50.490737Z node 2 :TX_PROXY ERROR: Actor# [2:7500796386371682167:2393] txid# 281474976715660 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-05-05T03:20:50.494714Z node 2 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715661: 2025-05-05T03:20:50.494924Z node 2 :TX_PROXY ERROR: Actor# [2:7500796386371682175:2398] txid# 281474976715661 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-05T03:20:50.494948Z node 2 :TX_PROXY ERROR: Actor# [2:7500796386371682175:2398] txid# 281474976715661 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-05T03:20:50.494951Z node 2 :TX_PROXY ERROR: Actor# [2:7500796386371682175:2398] txid# 281474976715661 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-05-05T03:20:50.498131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 waiting... 
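One detail worth noting in the TBackupTests::ShouldSucceedOnMultiShardTable[Raw] output above: each PUT to the mock S3 endpoint carries a Content-MD5 header, and because these are single-part uploads (the export actor logs multipart# 0), the ETag returned in PutObjectResult is the hex form of the same MD5 digest: content-md5 jsMhyzH+cyrvZpBm0dQVGQ== for data_01.csv, for example, corresponds to ETag 8ec321cb31fe732aef669066d1d41519. Below is a stdlib-only sketch of that relationship using values copied from the log; the helper itself is illustrative and assumes the plain-MD5 ETag convention of single-part S3 uploads.

    # md5_etag_check.py: illustrative sketch relating Content-MD5 headers to PutObjectResult ETags
    # for the single-part uploads (multipart# 0) shown in the backup test output above.
    import base64
    import hashlib

    def content_md5_and_etag(data):
        # For a single-part upload, S3-style servers report the object's plain MD5 as the ETag;
        # the Content-MD5 request header is the same digest, base64-encoded.
        digest = hashlib.md5(data).digest()
        return base64.b64encode(digest).decode('ascii'), digest.hex()

    # Header/ETag pairs copied verbatim from the log; the assertions confirm each pair is
    # two encodings of one digest.
    logged = {
        'scheme.pb':   ('Myp3UygaBNGp6+7AMgyRnQ==', '332a7753281a04d1a9ebeec0320c919d'),
        'data_00.csv': ('bj4KQf2rit2DOGLxvSlUww==', '6e3e0a41fdab8add833862f1bd2954c3'),
        'data_01.csv': ('jsMhyzH+cyrvZpBm0dQVGQ==', '8ec321cb31fe732aef669066d1d41519'),
    }
    for name, (md5_b64, etag) in logged.items():
        assert base64.b64decode(md5_b64).hex() == etag, name

Applying content_md5_and_etag to the exported files themselves would be expected to reproduce both values for each object, consistent with the "Finish: ... success# 1" records above.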
>> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> TFlatTest::CopyCopiedTableAndRead [GOOD] >> TFlatTest::CopyTableAndAddFollowers >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> TFlatTest::CopyTableAndAddFollowers [GOOD] >> TFlatTest::CopyCopiedTableAndDropFirstCopy >> TLocksTest::UpdateLockedKey [GOOD] >> TLocksTest::SetLockNothing >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-column] >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] >> test_retry.py::TestRetry::test_fail_first[kikimr0] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-row] >> TLocksTest::SetLockNothing [GOOD] |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-column] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] Test command err: 2025-05-05T03:20:51.569790Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796389928624678:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:51.570235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000394/r3tmp/tmpTq9bbg/pdisk_1.dat 2025-05-05T03:20:51.659031Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5162 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:51.694177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:51.704048Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:51.711551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:51.725769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:51.725803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:51.726940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:51.790108Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T03:20:51.790860Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T03:20:51.800942Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T03:20:51.801055Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-05-05T03:20:51.842600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-05T03:20:51.842707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-05-05T03:20:51.842859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent 
name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-05T03:20:51.842874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-05-05T03:20:51.842876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T03:20:51.842884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-05T03:20:51.842888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 waiting... 2025-05-05T03:20:51.843319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-05-05T03:20:51.843364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-05T03:20:51.843628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-05T03:20:51.843638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-05-05T03:20:51.843911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-05-05T03:20:51.843940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-05-05T03:20:51.843995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-05T03:20:51.843998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-05T03:20:51.844032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-05-05T03:20:51.844046Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-05T03:20:51.844050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500796389928625161:2239], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2 2025-05-05T03:20:51.844054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500796389928625161:2239], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4 2025-05-05T03:20:51.844060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-05-05T03:20:51.844070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-05-05T03:20:51.844161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 
72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T03:20:51.844181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T03:20:51.844783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-05-05T03:20:51.844805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-05-05T03:20:51.844808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-05-05T03:20:51.844811Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-05-05T03:20:51.844815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-05-05T03:20:51.844865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-05-05T03:20:51.844903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-05-05T03:20:51.844904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-05-05T03:20:51.844906Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2025-05-05T03:20:51.844908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 5 2025-05-05T03:20:51.844919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710676, ready parts: 0/1, is published: true 2025-05-05T03:20:51.845063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-05-05T03:20:51.845083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-05-05T03:20:51.845094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72057594037968897 2025-05-05T03:20:51.845099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:3, partId: 0 2025-05-05T03:20:51.845101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:4, partId: 0 2025-05-05T03:20:51.845119Z nod ... Reporting state Offline to schemeshard 72057594046644480 2025-05-05T03:20:52.825208Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796396411404274 RawX2: 4503612512274751 } TabletId: 72075186224037893 State: 4 2025-05-05T03:20:52.825218Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:20:52.825282Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:20:52.825415Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-05T03:20:52.825466Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T03:20:52.825515Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-05T03:20:52.825537Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T03:20:52.825563Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T03:20:52.825581Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-05T03:20:52.825596Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-05T03:20:52.825613Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-05T03:20:52.825628Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T03:20:52.825632Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-05T03:20:52.825642Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-05-05T03:20:52.825646Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T03:20:52.825650Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 
72057594046644480, LocalPathId: 2] was 2 2025-05-05T03:20:52.825719Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-05-05T03:20:52.825722Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-05T03:20:52.825729Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T03:20:52.825730Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-05T03:20:52.825733Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T03:20:52.825734Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T03:20:52.825736Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-05T03:20:52.825738Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-05T03:20:52.825743Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T03:20:52.825813Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037893 state Offline 2025-05-05T03:20:52.825846Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-05-05T03:20:52.825854Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-05-05T03:20:52.825859Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:7500796396411403924:2384], serverId# [3:7500796396411403925:2385], sessionId# [0:0:0] 2025-05-05T03:20:52.825863Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-05T03:20:52.825866Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:7500796396411403934:2391], serverId# [3:7500796396411403935:2392], sessionId# [0:0:0] 2025-05-05T03:20:52.825869Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-05T03:20:52.826012Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-05-05T03:20:52.826019Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-05-05T03:20:52.826021Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-05-05T03:20:52.826023Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-05-05T03:20:52.826051Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:7500796396411404115:2510], serverId# [3:7500796396411404131:2517], sessionId# [0:0:0] 2025-05-05T03:20:52.826103Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-05-05T03:20:52.826137Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-05-05T03:20:52.826516Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-05-05T03:20:52.826539Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-05-05T03:20:52.826632Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 
72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-05-05T03:20:52.826681Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 5] was 2 2025-05-05T03:20:52.826819Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-05-05T03:20:52.826830Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-05-05T03:20:52.827131Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T03:20:52.827141Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-05-05T03:20:52.827406Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-05-05T03:20:52.827412Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-05-05T03:20:52.827421Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037893 reason = ReasonStop 2025-05-05T03:20:52.827427Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [3:7500796396411404351:2673], serverId# [3:7500796396411404352:2674], sessionId# [0:0:0] 2025-05-05T03:20:52.827430Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [3:7500796396411404444:2741], serverId# [3:7500796396411404446:2743], sessionId# [0:0:0] 2025-05-05T03:20:52.827479Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037893 2025-05-05T03:20:52.827488Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-05-05T03:20:52.827489Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-05-05T03:20:52.827529Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796396411404269 RawX2: 4503612512274750 } TabletId: 72075186224037892 State: 4 2025-05-05T03:20:52.827536Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:20:52.827637Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:20:52.827691Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-05-05T03:20:52.828644Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-05-05T03:20:52.828666Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-05-05T03:20:52.828728Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 5] was 1 2025-05-05T03:20:52.828763Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-05-05T03:20:52.828778Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T03:20:52.828787Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 5], at schemeshard: 72057594046644480 2025-05-05T03:20:52.828793Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-05-05T03:20:52.828797Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T03:20:52.828826Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-05-05T03:20:52.828878Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-05-05T03:20:52.828887Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-05-05T03:20:52.828931Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 Check that tablet 72075186224037893 was deleted Check that tablet 72075186224037888 was deleted Check that tablet 72075186224037889 was deleted Check that tablet 72075186224037890 was deleted Check that tablet 72075186224037891 was deleted 2025-05-05T03:20:53.126349Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) 2025-05-05T03:20:53.126582Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) 2025-05-05T03:20:53.126692Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-05-05T03:20:53.126797Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) 2025-05-05T03:20:53.126915Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) 2025-05-05T03:20:53.127025Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> TLocksTest::CK_GoodLock [GOOD] >> TLocksTest::CK_BrokenLock ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::SetLockNothing [GOOD] Test command err: 2025-05-05T03:20:50.233116Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796385618225335:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:50.233134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002f3/r3tmp/tmp70Dp4Q/pdisk_1.dat 2025-05-05T03:20:50.334484Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:20:50.338348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:50.338370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:50.342495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19016 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:50.400258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:50.406352Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:50.414305Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:20:50.417098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:50.443650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:50.459648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:50.745090Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796385598690326:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:50.745723Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002f3/r3tmp/tmpuygagb/pdisk_1.dat 2025-05-05T03:20:50.766493Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29815 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:50.848118Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:50.848151Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:50.848485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:50.849895Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:50.850294Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:50.853917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:50.882699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:50.898968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:51.271283Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500796391305246535:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:51.272085Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002f3/r3tmp/tmpFj8x31/pdisk_1.dat 2025-05-05T03:20:51.292529Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25982 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:51.377270Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:51.377299Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:51.377617Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:51.378901Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:51.398342Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:20:51.414464Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:20:51.416952Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:20:51.440528Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:20:51.452748Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:20:51.766500Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500796388752758572:2087];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:51.766737Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002f3/r3tmp/tmpKjRnNZ/pdisk_1.dat 2025-05-05T03:20:51.779385Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23215 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:51.867243Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:51.867270Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:51.867621Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:51.869146Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:51.870487Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:51.876891Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:20:51.882321Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:51.902341Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:51.916044Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:52.279238Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7500796395305347670:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:52.279535Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002f3/r3tmp/tmp9GFquM/pdisk_1.dat 2025-05-05T03:20:52.290000Z node 5 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27462 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:20:52.382980Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:52.383009Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:52.383338Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:52.385101Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:52.386625Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:52.394376Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:20:52.410717Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T03:20:52.426736Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:52.758127Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7500796395930077231:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002f3/r3tmp/tmpHP7CrH/pdisk_1.dat 2025-05-05T03:20:52.766384Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:20:52.778398Z node 6 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25821 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:52.860751Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:52.860786Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:52.861152Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:52.861656Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:52.862355Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:52.879529Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:52.937232Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:52.948067Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:53.258619Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500796401027166805:2198];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:53.258721Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002f3/r3tmp/tmpqmsNbd/pdisk_1.dat 2025-05-05T03:20:53.273031Z node 7 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20184 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:20:53.362310Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:53.362349Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:53.362668Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:53.364011Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:53.364188Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:53.374133Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:53.388942Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:53.404907Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-row] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-column] |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-row] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-column] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_String-pk_types29-all_types29-index29---] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-row] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint32 >> test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-column] >> TLocksTest::CK_BrokenLock [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-row] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint32 |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith37Cpu::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::CK_BrokenLock [GOOD] Test command err: 2025-05-05T03:20:51.507393Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796391666300113:2077];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:51.507613Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c63/r3tmp/tmpIiOmu9/pdisk_1.dat 2025-05-05T03:20:51.588475Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:20:51.608400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:51.608430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-05-05T03:20:51.609504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24434 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:51.667867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:51.672368Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:51.680670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:51.704909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:51.726347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:51.740260Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-05-05T03:20:51.986628Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796388832642611:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:51.986729Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c63/r3tmp/tmpWAy2kS/pdisk_1.dat 2025-05-05T03:20:52.001879Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26491 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:52.083975Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:52.084005Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:52.084339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:52.085818Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:52.089254Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:52.098674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:52.111884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:52.125338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:52.404547Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500796393399083121:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:52.404751Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c63/r3tmp/tmppoYozO/pdisk_1.dat 2025-05-05T03:20:52.418763Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19045 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:52.507010Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:52.507037Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:52.507358Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:52.508553Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T03:20:52.512927Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:20:52.529960Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:52.542854Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c63/r3tmp/tmpgOsIk9/pdisk_1.dat 2025-05-05T03:20:52.907883Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:20:52.921636Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3729 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:20:53.006412Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:53.006437Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:53.006759Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:53.007908Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) Volati ... { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:54.836060Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:54.836087Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:54.836409Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:54.837105Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:54.838576Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:54.846874Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:20:54.851143Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:54.874315Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:20:54.886953Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c63/r3tmp/tmp8bhZfh/pdisk_1.dat 2025-05-05T03:20:55.232121Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500796406456464611:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:55.232194Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:20:55.253029Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17767 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:20:55.334773Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:55.334801Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:55.335103Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:55.335868Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:55.342492Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:55.347974Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:20:55.354166Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:20:55.382912Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T03:20:55.393998Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:20:55.747398Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500796408687946645:2137];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c63/r3tmp/tmpALwX9u/pdisk_1.dat 2025-05-05T03:20:55.750872Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:20:55.764663Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20368 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:55.846522Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:55.846552Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:55.846883Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:55.848213Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T03:20:55.851635Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:20:55.868510Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T03:20:55.882249Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:20:56.249832Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500796413300194058:2264];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:56.249877Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c63/r3tmp/tmpYTupvO/pdisk_1.dat 2025-05-05T03:20:56.280214Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28529 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:56.353266Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:56.353310Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:56.353656Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:20:56.355132Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:56.363648Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:56.379701Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T03:20:56.392505Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-column] |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-row] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-column] |94.7%| [TA] $(B)/ydb/tests/functional/autoconfig/test-results/py3test/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/tests/functional/autoconfig/test-results/py3test/{meta.json ... results_accumulator.log} >> test_cp_ic.py::TestCpIc::test_discovery >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint32 [GOOD] >> TLocksTest::CK_Range_BrokenLock >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint64_and_string |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000d53/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000d53/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, 
portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {'__DEFAULT': 100000}, portions: 2 contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1095402 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint64_and_string [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_list >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-row] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_list [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_struct >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-column] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_struct [GOOD] |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join0.test] [GOOD] |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint32 [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join1.test] >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint64_and_string >> test.py::test_run_benchmark[generic-row] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint64_and_string [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_list >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-row] >> test_ydb_table.py::TestExecuteQueryWithParams::test_list [GOOD] >> test.py::test_run_benchmark[scan-row] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_struct >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] >> test_ydb_table.py::TestExecuteQueryWithParams::test_struct [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_scan_query_with_parameters >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-column] >> test_ydb_table.py::TestExecuteQueryWithParams::test_scan_query_with_parameters [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> TLocksTest::CK_Range_BrokenLock [GOOD] >> TLocksTest::CK_Range_BrokenLockInf >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-row] >> test_ydb_backup.py::TestBackupSingle::test_single_table_backup >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] >> test.py::test_run_benchmark[scan-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-column] >> test_ydb_recursive_remove.py::TestRecursiveRemove::test_various_scheme_objects |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-row] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[scan-row] [GOOD] Test 
command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-column] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-row] >> test_dispatch.py::TestMapping::test_mapping >> TLocksTest::CK_Range_BrokenLockInf [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[scan-column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1337160) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-column] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> test_ydb_backup.py::TestBackupSingle::test_single_table_backup [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_BrokenLockInf [GOOD] Test command err: 2025-05-05T03:20:59.234334Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796425248343271:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:59.234351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c59/r3tmp/tmpMInEOd/pdisk_1.dat 2025-05-05T03:20:59.312341Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23872 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:59.381425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:59.381449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:59.382066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:59.382420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:59.386425Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:59.402346Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:20:59.410163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:59.448504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:59.462648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:59.712176Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796424877393735:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:59.716557Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c59/r3tmp/tmpAzlIXa/pdisk_1.dat 2025-05-05T03:20:59.735275Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61999 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:59.812235Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:59.812264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:59.815071Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:20:59.815439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:59.820891Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:59.832022Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:20:59.833045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:59.843669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:59.859319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:00.190899Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500796430919067735:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:21:00.191008Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c59/r3tmp/tmpHlTXO6/pdisk_1.dat 2025-05-05T03:21:00.211684Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15231 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:21:00.294557Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:00.294586Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:00.294886Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:00.302537Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:21:00.306290Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:00.316110Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:21:00.323305Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:00.342082Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:21:00.365712Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:21:00.701268Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500796429941388550:2215];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:21:00.702530Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c59/r3tmp/tmpbIjcch/pdisk_1.dat 2025-05-05T03:21:00.734432Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22148 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ... PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:21:02.346326Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:02.346355Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:02.346557Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:21:02.347238Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:21:02.355010Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:02.362912Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:21:02.367126Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:02.384434Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:02.405514Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c59/r3tmp/tmpFLTqyp/pdisk_1.dat 2025-05-05T03:21:02.934279Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:21:02.934963Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17478 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:21:03.003750Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:03.003779Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:03.004054Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:03.006488Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:21:03.007421Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:03.023321Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:21:03.024478Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:03.050557Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:03.074108Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:03.406407Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500796441969037005:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:21:03.406493Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c59/r3tmp/tmp70MNDV/pdisk_1.dat 2025-05-05T03:21:03.420307Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1959 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:21:03.506957Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:03.506983Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:03.507357Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:21:03.508107Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:21:03.509122Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:03.524390Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:03.540608Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:03.555317Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:03.888361Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500796442396353170:2150];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:21:03.895775Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c59/r3tmp/tmpe32lXX/pdisk_1.dat 2025-05-05T03:21:03.911152Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6273 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:21:03.990747Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:03.990776Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:03.991092Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:03.991934Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:21:04.000794Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:04.015686Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:04.034696Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-row] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-column] >> test.py::test_run_determentistic[column] [GOOD] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-row] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-column] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join1.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join2.test] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v1] >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-row] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v1] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select_distinct.test] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[fifo] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-column] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-strings.test] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> TLocksTest::BrokenLockErase ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_determentistic[column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
>> TCancelTx::CrossShardReadOnly >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-column] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-row] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-column] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] >> TCancelTx::CrossShardReadOnly [GOOD] >> TCancelTx::CrossShardReadOnlyWithReadSets >> test_postgres.py::TestPGSQL::test_sql_suite[plan-strings.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-window.test] >> test_ydb_recursive_remove.py::TestRecursiveRemove::test_various_scheme_objects [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-row] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-column] >> TCancelTx::CrossShardReadOnlyWithReadSets [GOOD] >> TCancelTx::ImmediateReadOnly ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000d49/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000d49/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1096630 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-row] >> test_retry.py::TestRetry::test_fail_first[kikimr0] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-column] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> TCancelTx::ImmediateReadOnly [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-window.test] [GOOD] >> TLocksTest::BrokenLockErase [GOOD] >> TLocksTest::BrokenDupLock >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TCancelTx::ImmediateReadOnly [GOOD] Test command err: 2025-05-05T03:21:07.847409Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796458680741355:2267];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:21:07.847746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c49/r3tmp/tmpV5rAGz/pdisk_1.dat 2025-05-05T03:21:07.966857Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61970 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: 2025-05-05T03:21:08.001609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:08.001636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:08.002584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:21:08.004427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T03:21:08.008060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:61970 2025-05-05T03:21:08.096000Z node 1 :TX_PROXY ERROR: Actor# [1:7500796462975709162:2379] txid# 281474976715660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-05T03:21:08.096029Z node 1 :TX_PROXY ERROR: Actor# [1:7500796462975709162:2379] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T03:21:08.102156Z node 1 :TX_PROXY ERROR: Actor# [1:7500796462975709175:2389] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-05T03:21:08.102182Z node 1 :TX_PROXY ERROR: Actor# [1:7500796462975709175:2389] txid# 281474976715661 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T03:21:08.111262Z node 1 :TX_PROXY ERROR: Actor# [1:7500796462975709187:2398] txid# 281474976715662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-05T03:21:08.111290Z node 1 :TX_PROXY ERROR: Actor# [1:7500796462975709187:2398] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T03:21:08.132209Z node 1 :TX_PROXY ERROR: Actor# [1:7500796462975709214:2419] txid# 281474976715664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-05T03:21:08.132235Z node 1 :TX_PROXY ERROR: Actor# [1:7500796462975709214:2419] txid# 281474976715664 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T03:21:08.136934Z node 1 :TX_PROXY ERROR: Actor# [1:7500796462975709227:2429] txid# 281474976715665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-05T03:21:08.136959Z node 1 :TX_PROXY ERROR: Actor# [1:7500796462975709227:2429] txid# 281474976715665 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T03:21:08.144626Z node 1 :TX_PROXY ERROR: Actor# [1:7500796462975709240:2439] txid# 281474976715666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-05T03:21:08.144652Z node 1 :TX_PROXY ERROR: Actor# [1:7500796462975709240:2439] txid# 281474976715666 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T03:21:08.409755Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796463378690855:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:21:08.410237Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c49/r3tmp/tmptsGNAi/pdisk_1.dat 2025-05-05T03:21:08.434699Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:30514 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:21:08.509181Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:08.509212Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:08.509566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:21:08.510277Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:21:08.516836Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:21:08.523960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:30514 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c49/r3tmp/tmpWSatkH/pdisk_1.dat 2025-05-05T03:21:08.958325Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:21:08.962174Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61856 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-05-05T03:21:09.036078Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:09.036128Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:09.036543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:21:09.037208Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:21:09.038383Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:21:09.041835Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:61856 2025-05-05T03:21:09.083846Z node 3 :TX_PROXY ERROR: Actor# [3:7500796468906116717:2382] txid# 281474976715660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-05T03:21:09.083872Z node 3 :TX_PROXY ERROR: Actor# [3:7500796468906116717:2382] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T03:21:09.095673Z node 3 :TX_PROXY ERROR: Actor# [3:7500796468906116732:2394] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-05T03:21:09.095710Z node 3 :TX_PROXY ERROR: Actor# [3:7500796468906116732:2394] txid# 281474976715661 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T03:21:09.103150Z node 3 :TX_PROXY ERROR: Actor# [3:7500796468906116747:2406] txid# 281474976715662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-05T03:21:09.103176Z node 3 :TX_PROXY ERROR: Actor# [3:7500796468906116747:2406] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T03:21:09.125125Z node 3 :TX_PROXY ERROR: Actor# [3:7500796468906116773:2426] txid# 281474976715664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-05T03:21:09.125154Z node 3 :TX_PROXY ERROR: Actor# [3:7500796468906116773:2426] txid# 281474976715664 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T03:21:09.131491Z node 3 :TX_PROXY ERROR: Actor# [3:7500796468906116787:2437] txid# 281474976715665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-05T03:21:09.131527Z node 3 :TX_PROXY ERROR: Actor# [3:7500796468906116787:2437] txid# 281474976715665 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T03:21:09.146606Z node 3 :TX_PROXY ERROR: Actor# [3:7500796468906116800:2447] txid# 281474976715666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-05T03:21:09.146635Z node 3 :TX_PROXY ERROR: Actor# [3:7500796468906116800:2447] txid# 281474976715666 RESPONSE Status# ExecCancelled marker# P13c test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000c49/r3tmp/tmpUMqacg/pdisk_1.dat 2025-05-05T03:21:09.458324Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:21:09.460682Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18733 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:21:09.535244Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:09.535277Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:09.535827Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:21:09.536331Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:21:09.537860Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:21:09.538997Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:18733 2025-05-05T03:21:09.588221Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715660 at tablet 72075186224037888 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-05-05T03:21:09.588313Z node 4 :TX_PROXY ERROR: Actor# [4:7500796466228664975:2379] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T03:21:09.591254Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715662 at tablet 72075186224037889 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-05-05T03:21:09.591342Z node 4 :TX_PROXY ERROR: Actor# [4:7500796466228664989:2387] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-row] >> test_retry.py::TestRetry::test_low_rate[kikimr0] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-column] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test.py::test_run_determentistic[row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-row] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_full_stats >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-column] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromJson::test_script_from_file >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-row] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view [GOOD] >> TLocksTest::BrokenDupLock [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-column] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-row] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::BrokenDupLock [GOOD] Test command err: 2025-05-05T03:21:07.543419Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796459024145295:2200];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:21:07.544218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00037e/r3tmp/tmpg6uWW9/pdisk_1.dat 2025-05-05T03:21:07.626844Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8477 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:21:07.702957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:07.703006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:07.703973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:21:07.704342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:07.713527Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:07.717551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:07.784020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:07.797668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:08.043023Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796461841626263:2212];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:21:08.044039Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00037e/r3tmp/tmpsmkW2g/pdisk_1.dat 2025-05-05T03:21:08.060371Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12837 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:21:08.148799Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:08.148834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:08.149198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:21:08.151428Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:21:08.159746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:08.180462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:08.195500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:08.543435Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500796461306743749:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:21:08.543463Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00037e/r3tmp/tmpqNIVvF/pdisk_1.dat 2025-05-05T03:21:08.566469Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20439 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:21:08.643766Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:08.644971Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:08.647688Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:08.648562Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:08.648584Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:08.649065Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:21:08.704091Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:08.713172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:09.055312Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500796465910765572:2203];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00037e/r3tmp/tmpzcSWjQ/pdisk_1.dat 2025-05-05T03:21:09.058969Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:21:09.075670Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24154 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:21:09.159032Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:09.159074Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:09.159499Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:21:09.161054Z node 4 :HIVE ... 00037e/r3tmp/tmpaXzZWV/pdisk_1.dat 2025-05-05T03:21:10.565548Z node 7 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61455 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:21:10.646377Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:10.646409Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:10.646718Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:21:10.647367Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:21:10.657246Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:21:10.674224Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:10.686078Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:11.062175Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500796475356828045:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:21:11.062324Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00037e/r3tmp/tmpLQJzM1/pdisk_1.dat 2025-05-05T03:21:11.084109Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23568 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:21:11.164466Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:11.164505Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:11.164871Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:21:11.165490Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:21:11.175818Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:11.190041Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T03:21:11.205217Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:21:11.553881Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500796476529835847:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:21:11.554238Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00037e/r3tmp/tmpnqLBEK/pdisk_1.dat 2025-05-05T03:21:11.574025Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:62807 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:21:11.660448Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:11.660493Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:11.661071Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:21:11.663362Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:21:11.672632Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:11.688387Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:11.705191Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:21:12.051737Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500796482243488941:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:21:12.052039Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00037e/r3tmp/tmp1uywiE/pdisk_1.dat 2025-05-05T03:21:12.067416Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2622 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:21:12.155941Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:12.155978Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:12.156411Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:12.157254Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:21:12.161908Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:12.177882Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:12.191495Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-fifo] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-column] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join2.test] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_determentistic[row] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-column] [GOOD] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view_json >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-row] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-column] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[fifo] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromJson::test_script_from_file [GOOD] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_full_stats [GOOD] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_profile_stats >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_profile_stats [GOOD] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_basic_stats >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-row] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_basic_stats [GOOD] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-column] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-row] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] >> 
test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-column] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-row] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view_json [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-column] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_recursive_remove.py::TestRecursiveRemove::test_various_scheme_objects [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-row] |95.0%| [TA] $(B)/ydb/tests/datashard/dump_restore/test-results/py3test/{meta.json ... results_accumulator.log} |95.0%| [TA] {RESULT} $(B)/ydb/tests/datashard/dump_restore/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-column] |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> TFlatTest::SplitInvalidPath >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_external_table_references_json >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-row] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-column] >> TFlatTest::SplitInvalidPath [GOOD] >> TFlatTest::SplitThenMerge >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options >> TFlatTest::SplitThenMerge [GOOD] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-std] [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-row] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-column] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SplitThenMerge [GOOD] Test command err: 2025-05-05T03:21:18.917251Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796507788044775:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:21:18.917480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000bfe/r3tmp/tmpGiQeEY/pdisk_1.dat 2025-05-05T03:21:18.986851Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61316 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:21:19.019188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:19.019214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:19.020295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:21:19.051204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:21:19.060137Z node 1 :FLAT_TX_SCHEMESHARD WARN: TSplitMerge Propose failed StatusNameConflict Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp:825, tableStr: /dc-1/Dir1, tableId: , opId: 281474976715659:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir1" SourceTabletId: 100500 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 42 } } } } 2025-05-05T03:21:19.060619Z node 1 :TX_PROXY ERROR: Actor# [1:7500796512083012653:2296] txid# 281474976715659, issues: { message: "Check failed: path: \'/dc-1/Dir1\', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp:825" severity: 1 } Error 128: Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp:825 2025-05-05T03:21:19.318415Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796511984486484:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000bfe/r3tmp/tmpcE0m7q/pdisk_1.dat 2025-05-05T03:21:19.323254Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:21:19.329870Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18668 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:21:19.421702Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:21:19.421728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:21:19.422107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:21:19.423183Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:21:19.423649Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T03:21:19.428983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:21:19.461264Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T03:21:19.462784Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T03:21:19.471935Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T03:21:19.473080Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746415279490 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false 
IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) 2025-05-05T03:21:19.480695Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:21:19.481151Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:21:19.481198Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:21:19.481412Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:21:19.481446Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:21:19.481509Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T03:21:19.481641Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:21:19.481669Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:21:19.481727Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T03:21:19.481846Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:21:19.482001Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:21:19.482069Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T03:21:19.482216Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:21:19.482238Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:21:19.482288Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T03:21:19.482405Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:21:19.482424Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:21:19.482473Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T03:21:19.482587Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:21:19.482606Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:21:19.482653Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T03:21:19.482768Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:21:19.482912Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:21:19.483000Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T03:21:19.483169Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:21:19.483200Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:21:19.483265Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T03:21:19.483429Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:21:19.483453Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:21:19.483528Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T03:21:19.483649Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T03:21:19.483684Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 
2025-05-05T03:21:19.483734Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T03:21:19.483872Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its ... 715693 Step: 0 Generation: 1 2025-05-05T03:21:19.597853Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-05-05T03:21:19.597859Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7500796511984486925 RawX2: 4503608217307386 } Origin: 72075186224037889 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-05-05T03:21:19.597867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715693:0, shardIdx: 72057594046644480:2, datashard: 72075186224037889, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-05-05T03:21:19.597869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-05-05T03:21:19.597894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-05-05T03:21:19.597944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796511984487546 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-05-05T03:21:19.597951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715693, tablet: 72075186224037894, partId: 0 2025-05-05T03:21:19.597970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796511984487546 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-05-05T03:21:19.597991Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-05-05T03:21:19.597997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7500796511984487546 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-05-05T03:21:19.598004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715693:0, shardIdx: 72057594046644480:7, datashard: 72075186224037894, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-05-05T03:21:19.598011Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-05-05T03:21:19.598013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-05-05T03:21:19.598016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715693:0, datashard: 
72075186224037894, at schemeshard: 72057594046644480 2025-05-05T03:21:19.598020Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715693:0 129 -> 240 2025-05-05T03:21:19.598070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-05-05T03:21:19.598113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-05-05T03:21:19.598141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-05-05T03:21:19.598142Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037889 state PreOffline 2025-05-05T03:21:19.598148Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2025-05-05T03:21:19.598149Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-05-05T03:21:19.598175Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037894 state PreOffline 2025-05-05T03:21:19.598178Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-05-05T03:21:19.598241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-05T03:21:19.598273Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715693:0 progress is 1/1 2025-05-05T03:21:19.598277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-05-05T03:21:19.598280Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715693:0 progress is 1/1 2025-05-05T03:21:19.598282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-05-05T03:21:19.598284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715693, ready parts: 1/1, is published: true 2025-05-05T03:21:19.598292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7500796511984487768:2427] message: TxId: 281474976715693 2025-05-05T03:21:19.598295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-05-05T03:21:19.598298Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715693:0 2025-05-05T03:21:19.598300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715693:0 2025-05-05T03:21:19.598329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-05-05T03:21:19.602142Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T03:21:19.602168Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-05-05T03:21:19.602430Z node 2 :TX_DATASHARD 
DEBUG: 72075186224037894 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T03:21:19.602448Z node 2 :TX_DATASHARD INFO: 72075186224037894 Initiating switch from PreOffline to Offline state 2025-05-05T03:21:19.602752Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T03:21:19.602780Z node 2 :TX_DATASHARD INFO: 72075186224037894 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T03:21:19.602899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796511984486925 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 2025-05-05T03:21:19.602929Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:21:19.602997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796511984487546 RawX2: 4503608217307478 } TabletId: 72075186224037894 State: 4 2025-05-05T03:21:19.603001Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:21:19.603045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:21:19.603059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:21:19.603226Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-05-05T03:21:19.603233Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037894 state Offline 2025-05-05T03:21:19.603768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-05T03:21:19.603827Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-05T03:21:19.603833Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037894 reason = ReasonStop 2025-05-05T03:21:19.603964Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-05-05T03:21:19.603964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T03:21:19.603989Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-05-05T03:21:19.604020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-05-05T03:21:19.604042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T03:21:19.604059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T03:21:19.604062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 
72057594046644480 2025-05-05T03:21:19.604070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T03:21:19.604182Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-05T03:21:19.604185Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-05-05T03:21:19.604219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T03:21:19.604224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-05T03:21:19.604231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-05-05T03:21:19.604234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-05-05T03:21:19.604240Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T03:21:19.604322Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 2025-05-05T03:21:19.604344Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037894 >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-row] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-fifo] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-column] |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] >> 
test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-row] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_pretty >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-column] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_external_table_references_json [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-row] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-column] |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-window.test] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-row] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_pretty |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-column] >> test_dispatch.py::TestMapping::test_mapping [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal150-pk_types25-all_types25-index25---] [GOOD] >> test.py::test_run_benchmark[generic-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-row] >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> TObjectStorageListingTest::ManyDeletes [GOOD] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute 
s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_pretty [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64 >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64_array >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64_array [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode_array ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::ManyDeletes [GOOD] Test command err: 2025-05-05T03:20:27.663191Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796288507501484:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:27.663216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000cb0/r3tmp/tmpsu93uW/pdisk_1.dat 2025-05-05T03:20:27.730741Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61035, node 1 2025-05-05T03:20:27.741189Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T03:20:27.741202Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T03:20:27.741204Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T03:20:27.741245Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14024 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: 2025-05-05T03:20:27.765448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:27.765491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:27.766899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:27.768295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:27.771156Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:27.776253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:29.561112Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796296979563559:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:29.561516Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000cb0/r3tmp/tmp6QKBSC/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16998, node 2 2025-05-05T03:20:29.583332Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:20:29.583590Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T03:20:29.583601Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T03:20:29.583603Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T03:20:29.583651Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1748 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:20:29.660997Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:20:29.661030Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:20:29.661505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:20:29.662050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:20:29.678386Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:20:29.688377Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:20:29.696160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... ....2025-05-05T03:20:34.560970Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7500796296979563559:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:20:34.561033Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; ....2025-05-05T03:20:44.571071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-05-05T03:20:44.571094Z node 2 :IMPORT WARN: Table profiles were not loaded .. 
2025-05-05T03:20:56.224599Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037891 2025-05-05T03:20:56.224624Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-05-05T03:20:56.224845Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976719700 at tablet 72075186224037891 2025-05-05T03:20:56.224886Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976719700 at tablet 72075186224037890 2025-05-05T03:20:56.225050Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037892 2025-05-05T03:20:56.225057Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:20:56.225195Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976719700 at tablet 72075186224037892 2025-05-05T03:20:56.225248Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976719700 at tablet 72075186224037889 2025-05-05T03:20:56.225673Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037891 2025-05-05T03:20:56.225685Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037892 2025-05-05T03:20:56.225714Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-05T03:20:56.225714Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-05-05T03:20:56.227499Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976719700 at step 1746415256271 at tablet 72075186224037889 { Transactions { TxId: 281474976719700 AckTo { RawX1: 0 RawX2: 0 } } Step: 1746415256271 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-05-05T03:20:56.227521Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-05T03:20:56.227555Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-05-05T03:20:56.227562Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-05-05T03:20:56.227569Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1746415256271:281474976719700] in PlanQueue unit at 72075186224037889 2025-05-05T03:20:56.227588Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 got data tx from cache 1746415256271:281474976719700 2025-05-05T03:20:56.227652Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976719700 at step 1746415256271 at tablet 72075186224037891 { Transactions { TxId: 281474976719700 AckTo { RawX1: 0 RawX2: 0 } } Step: 1746415256271 MediatorID: 72057594046382081 TabletID: 72075186224037891 } 2025-05-05T03:20:56.227663Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-05-05T03:20:56.227696Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-05-05T03:20:56.227700Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 1 2025-05-05T03:20:56.227705Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1746415256271:281474976719700] in PlanQueue unit at 72075186224037891 2025-05-05T03:20:56.227732Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037891 got data tx from cache 1746415256271:281474976719700 2025-05-05T03:20:56.227977Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-05T03:20:56.228061Z node 2 
:TX_DATASHARD DEBUG: Planned transaction txId 281474976719700 at step 1746415256271 at tablet 72075186224037890 { Transactions { TxId: 281474976719700 AckTo { RawX1: 0 RawX2: 0 } } Step: 1746415256271 MediatorID: 72057594046382081 TabletID: 72075186224037890 } 2025-05-05T03:20:56.228063Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-05-05T03:20:56.228077Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-05-05T03:20:56.228080Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2025-05-05T03:20:56.228083Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1746415256271:281474976719700] in PlanQueue unit at 72075186224037890 2025-05-05T03:20:56.228091Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037890 got data tx from cache 1746415256271:281474976719700 2025-05-05T03:20:56.228096Z node 2 :TX_DATASHARD DEBUG: tx 281474976719700 released its data 2025-05-05T03:20:56.228102Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active ... D: 72075186224037892 } 2025-05-05T03:21:24.065614Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-05-05T03:21:24.065615Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-05-05T03:21:24.065631Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-05-05T03:21:24.065631Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-05-05T03:21:24.065633Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 1 2025-05-05T03:21:24.065635Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1746415284110:281474976721711] in PlanQueue unit at 72075186224037891 2025-05-05T03:21:24.065638Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2025-05-05T03:21:24.065640Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037891 got data tx from cache 1746415284110:281474976721711 2025-05-05T03:21:24.065641Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1746415284110:281474976721711] in PlanQueue unit at 72075186224037892 2025-05-05T03:21:24.065645Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037892 got data tx from cache 1746415284110:281474976721711 2025-05-05T03:21:24.065786Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-05T03:21:24.065820Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-05-05T03:21:24.065820Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 released its data 2025-05-05T03:21:24.065823Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-05T03:21:24.065863Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-05-05T03:21:24.065929Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 at 72075186224037890 restored its data 2025-05-05T03:21:24.065962Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 at 72075186224037889 restored its data 2025-05-05T03:21:24.066103Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 released its data 2025-05-05T03:21:24.066115Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-05-05T03:21:24.066144Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-05-05T03:21:24.066180Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 released its data 2025-05-05T03:21:24.066191Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-05T03:21:24.066264Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 at 72075186224037892 restored its data 2025-05-05T03:21:24.066313Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-05-05T03:21:24.066407Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 at 72075186224037890 restored its data 2025-05-05T03:21:24.066412Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 released its data 2025-05-05T03:21:24.066416Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-05T03:21:24.066536Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-05T03:21:24.066569Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1746415284110} 2025-05-05T03:21:24.066663Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1746415284110} 2025-05-05T03:21:24.066696Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037892 step# 1746415284110} 2025-05-05T03:21:24.066699Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 1746415284110} 2025-05-05T03:21:24.066705Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-05-05T03:21:24.066715Z node 2 :TX_DATASHARD DEBUG: Complete [1746415284110 : 281474976721711] from 72075186224037891 at tablet 72075186224037891 send result to client [2:7500796533202831188:13831], exec latency: 0 ms, propose latency: 1 ms 2025-05-05T03:21:24.066724Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-05-05T03:21:24.066759Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-05-05T03:21:24.066774Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-05-05T03:21:24.066842Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 at 72075186224037892 restored its data 2025-05-05T03:21:24.066855Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 at 72075186224037889 restored its data 2025-05-05T03:21:24.066962Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-05T03:21:24.067695Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-05-05T03:21:24.067706Z node 2 :TX_DATASHARD DEBUG: Complete [1746415284110 : 281474976721711] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7500796533202831188:13831], exec latency: 1 ms, propose latency: 2 ms 2025-05-05T03:21:24.067710Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-05-05T03:21:24.067736Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-05-05T03:21:24.067745Z node 2 :TX_DATASHARD DEBUG: Complete [1746415284110 : 281474976721711] from 72075186224037892 at tablet 72075186224037892 send result to client [2:7500796533202831188:13831], exec latency: 1 ms, propose 
latency: 2 ms 2025-05-05T03:21:24.067751Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-05-05T03:21:24.067863Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 released its data 2025-05-05T03:21:24.067879Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-05T03:21:24.078578Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-05-05T03:21:24.078795Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 at 72075186224037889 restored its data 2025-05-05T03:21:24.083111Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-05T03:21:24.085805Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-05-05T03:21:24.085830Z node 2 :TX_DATASHARD DEBUG: Complete [1746415284110 : 281474976721711] from 72075186224037889 at tablet 72075186224037889 send result to client [2:7500796533202831188:13831], exec latency: 17 ms, propose latency: 20 ms 2025-05-05T03:21:24.085842Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-05T03:21:24.096253Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-05-05T03:21:24.096407Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-05-05T03:21:24.096476Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2025-05-05T03:21:24.096526Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "" contents: 0 common prefixes: 0 2025-05-05T03:21:24.096558Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 1 2025-05-05T03:21:24.096634Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-05-05T03:21:24.096689Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 0 2025-05-05T03:21:24.096757Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, 
value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-05-05T03:21:24.096838Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-05-05T03:21:24.096877Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2025-05-05T03:21:24.096927Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Godfather.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "/Videos/Godfather.avi" contents: 2 common prefixes: 0 2025-05-05T03:21:24.096987Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/House of Cards/Season 1/Chapter 1.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 4 last path: "/Videos/House of Cards/Season 1/Chapter 1.avi" contents: 3 common prefixes: 1 2025-05-05T03:21:24.097040Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Terminator 2.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 5 last path: "/Videos/Terminator 2.avi" contents: 4 common prefixes: 1 2025-05-05T03:21:24.097063Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: finished status: 0 description: "" contents: 4 common prefixes: 1 >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode_array [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_pretty |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal150-pk_types25-all_types25-index25---] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_pretty [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64 >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64_array |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_external_table_references_json [GOOD] >> 
test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64_array [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[generic-row] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode_array >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_pretty [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64 |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-fifo] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64 [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64_array >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-column] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode_array ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |95.1%| [TA] 
$(B)/ydb/tests/functional/clickbench/test-results/py3test/{meta.json ... results_accumulator.log} >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_csv >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_csv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_tsv >> test_dispatch.py::TestMapping::test_idle |95.1%| [TA] {RESULT} $(B)/ydb/tests/functional/clickbench/test-results/py3test/{meta.json ... results_accumulator.log} |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join2.test] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_tsv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_pretty >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_pretty [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64 |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64 [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64_array >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode_array ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: 
ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_ydb_backup.py::TestBackupSingleNotNull::test_single_table_backup >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_csv >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_csv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_tsv |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_tsv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_pretty >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_pretty [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64 >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64 [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64_array >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode_array >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_csv |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-row] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_csv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_tsv |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_tsv [GOOD] >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead 
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> overlapping_portions.py::TestOverlappingPortions::test |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] >> test_ydb_backup.py::TestBackupSingleNotNull::test_single_table_backup [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |95.3%| [TA] $(B)/ydb/tests/functional/sqs/common/test-results/py3test/{meta.json ... 
results_accumulator.log} |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-column] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] |95.3%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/common/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-row] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use 
ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1340048) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-fifo] [GOOD] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-column] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> 
test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval-pk_types35-all_types35-index35---] >> test_streaming_join.py::TestStreamingJoin::test_grace_join[v1-client0] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-row] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_validation.py::TestS3::test_empty[v1-client0] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-std] [GOOD] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client0-year Int32-False] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-column] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v2[v2-client0] >> test_format_setting.py::TestS3::test_interval_unit[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-false-client0] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_bindings_1.py::TestBindings::test_s3_insert[v2-kikimr_settings0-client0] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_system_views.py::TestQueryMetricsUniqueQueries::test_case [GOOD] |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> 
test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] >> test_s3_1.py::TestS3::test_write_result[v1-kikimr_params0-client0] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-row] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.gz-gzip] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_json[sql] |95.5%| [TA] $(B)/ydb/tests/functional/sqs/multinode/test-results/py3test/{meta.json ... results_accumulator.log} |95.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] |95.6%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/test-results/py3test/{meta.json ... results_accumulator.log} >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] |95.6%| [TA] $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |95.6%| [TA] {RESULT} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} >> test_db_counters.py::TestKqpCounters::test_case >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-false] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-column] >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v1-client0] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_csv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_tsv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_raw[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_json[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_csv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_tsv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_json[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_csv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_tsv[sql] >> test_statistics.py::TestS3::test_egress[v2-client0-json_list] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_tsv[sql] [GOOD] >> 
test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_raw[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_raw[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_json[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_csv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_tsv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_raw[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_json[sql] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client0-year Int32-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client1-year Int32 NOT NULL-False] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-9.test] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_csv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_tsv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_json[sql] >> test_formats.py::TestS3Formats::test_format[v2-test.csv-csv_with_names-kikimr_settings0] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_csv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_csv[sql] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_format_parquet[row] [SKIPPED] >> test_ydb_impex.py::TestImpex::test_format_parquet[column] [SKIPPED] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args0-row] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_tsv[sql] >> test_insert.py::TestS3::test_insert[v1-client0-json_list-dataset] >> test_format_setting.py::TestS3::test_interval_unit[v1-client0] [GOOD] >> test_format_setting.py::TestS3::test_interval_unit[v2-client0] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_csv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_csv[sql] [GOOD] >> 
test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_tsv[sql] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args0-column] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_csv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_tsv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_csv[sql] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-12.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-13.test] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_tsv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_tsv[sql] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args1-row] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client1-year Int32 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client2-year Uint32-False] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args1-column] >> test_tenants.py::TestTenants::test_create_create_table[enable_alter_database_create_hive_first--false] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] >> test_dispatch.py::TestMapping::test_idle [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-false-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-true-client0] >> test_test_connection.py::TestConnection::test_test_s3_connection[v1-client0] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args2-row] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args2-column] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args3-row] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args3-column] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client2-year Uint32-False] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.gz-gzip] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client3-year Uint32 NOT NULL-True] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.lz4-lz4] >> test_bindings_1.py::TestBindings::test_s3_insert[v2-kikimr_settings0-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_s3_insert[v1-kikimr_settings0-client0] >> test_format_setting.py::TestS3::test_interval_unit[v2-client0] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[json-additional_args4-row] >> test_format_setting.py::TestS3::test_bad_format_setting[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-false-client0] >> test_format_setting.py::TestS3::test_bad_format_setting[v1-client0] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[json-additional_args4-column] >> test_format_setting.py::TestS3::test_bad_format_setting[v2-client0] >> test_format_setting.py::TestS3::test_bad_format_setting[v2-client0] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.csv-csv_with_names] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args0-row] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] >> test_ydb_backup.py::TestBaseSingleFromDifPlaces::test_single_table_backup_from_different_places >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client3-year Uint32 NOT NULL-True] [GOOD] >> 
test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client4-year Int64-False] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args0-column] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.lz4-lz4] [GOOD] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v1-client0] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.br-brotli] >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v2-client0] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args1-row] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.tsv-tsv_with_names] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args1-column] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint32 |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-false] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-true] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> test_dispatch.py::TestMapping::test_idle [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1345610) is multi-threaded, use of fork() may lead to deadlocks in the child. 
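Two of the warnings captured above spell out their own fix: ydb/tests/library/sqs/requests_client.py:140 calls the deprecated logger.warn alias, and popen_fork.py warns that fork()-ing a multi-threaded parent can deadlock the child. A short sketch of both fixes, assuming nothing about the harness beyond the messages themselves; the logger name, message and worker function are placeholders:

import logging
import multiprocessing as mp

logger = logging.getLogger("sqs.requests")  # placeholder logger name

def report_failure(code: int, reason: str, text: str) -> None:
    # 'warning' is the supported spelling; 'warn' is a deprecated alias.
    logger.warning("Last request failed with code %s, reason '%s' and text '%s'",
                   code, reason, text)

def work(item: str) -> int:
    return len(item)  # placeholder workload

if __name__ == "__main__":
    # Requesting the 'spawn' start method avoids fork()-ing a multi-threaded
    # parent, which is what the popen_fork.py DeprecationWarning is about.
    ctx = mp.get_context("spawn")
    with ctx.Pool(processes=2) as pool:
        print(pool.map(work, ["a", "bb", "ccc"]))

Switching the start method changes child startup semantics (the module is re-imported under "spawn"), so worker functions must stay importable at top level.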
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.tsv-tsv_with_names] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-false-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-true-client0] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.json-json_each_row] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args1-column] [GOOD] >> test_bindings_1.py::TestBindings::test_s3_insert[v1-kikimr_settings0-client0] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args2-row] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.br-brotli] [GOOD] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v2-client0] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.bz2-bzip2] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args2-column] >> test_statistics.py::TestS3::test_egress[v2-client0-json_list] [GOOD] >> test_statistics.py::TestS3::test_egress[v2-client0-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client4-year Int64-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client5-year Int64 NOT NULL-False] >> test_tenants.py::TestTenants::test_create_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection[v1-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-true] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-false] |95.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-9.test] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.json-json_each_row] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.bz2-bzip2] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.parquet-parquet] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args3-row] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.zst-zstd] >> test_test_connection.py::TestConnection::test_test_s3_connection[v2-client0] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v1-client0] >> test_statistics.py::TestS3::test_egress[v2-client0-json_each_row] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v1-client0] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v2-client0] >> 
test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args3-column] >> test_ydb_backup.py::TestBaseSingleFromDifPlaces::test_single_table_backup_from_different_places [GOOD] >> test_statistics.py::TestS3::test_egress[v2-client0-csv_with_names] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection[v2-false-client0] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[data] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v2-client0] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v1-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_early_finish.py::TestEarlyFinish::test_early_finish[v1-client0] >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v1-client0] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v2-client0] >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v2-client0] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint32 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint64_and_string >> test_formats.py::TestS3Formats::test_format[v2-test.csv-csv_with_names-kikimr_settings0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v2-test.tsv-tsv_with_names-kikimr_settings0] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint64_and_string [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_list >> test_statistics.py::TestS3::test_egress[v2-client0-csv_with_names] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_list [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_struct >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client5-year Int64 NOT NULL-False] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[json-additional_args4-row] >> test_statistics.py::TestS3::test_egress[v2-client0-parquet] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client6-year Uint64-False] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.parquet-parquet] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_struct [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_multiple_files >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.zst-zstd] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed 
in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.csv-csv_with_names] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_multiple_files [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_ignore_excess_parameters >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.xz-xz] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[json-additional_args4-column] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_ignore_excess_parameters [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_script_from_file >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-false] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-true] |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_script_from_file [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint32 >> test_insert.py::TestS3::test_insert[v1-client0-json_list-dataset] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection[v2-false-client0] [GOOD] >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v2-client0] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint32 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint64_and_string >> test_insert.py::TestS3::test_insert[v1-client0-json_list-dataにちは% set] >> test_explicit_partitioning_0.py::TestS3::test_projection[v2-true-client0] |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint64_and_string [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_list >> test_statistics.py::TestS3::test_egress[v2-client0-parquet] [GOOD] |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> test_statistics.py::TestS3::test_egress[v1-client0-json_list] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_list [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_struct >> 
test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[json-additional_args4-column] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_struct [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_multiple_files >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.csv-csv_with_names] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_multiple_files [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_ignore_excess_parameters >> test_formats.py::TestS3Formats::test_format[v2-test.tsv-tsv_with_names-kikimr_settings0] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.tsv-tsv_with_names] >> test_formats.py::TestS3Formats::test_format[v2-test_each_row.json-json_each_row-kikimr_settings0] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_ignore_excess_parameters [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_script_from_file >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_script_from_file [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.xz-xz] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.gz-gzip] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[scan] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client6-year Uint64-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client7-year Uint64 NOT NULL-False] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[data] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params0] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.json-json_each_row] >> test_formats.py::TestS3Formats::test_format[v2-test_each_row.json-json_each_row-kikimr_settings0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v2-test_list.json-json_list-kikimr_settings0] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[scan] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-true] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[data] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-false] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[scan] >> 
test_explicit_partitioning_0.py::TestS3::test_projection[v2-true-client0] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[data] >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-false-client0] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-std] [GOOD] |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[scan] >> test_formats.py::TestS3Formats::test_format[v2-test_list.json-json_list-kikimr_settings0] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[data] >> test_insert.py::TestS3::test_insert[v1-client0-json_list-dataにちは% set] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-json_each_row-dataset] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.json-json_each_row] [GOOD] |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_formats.py::TestS3Formats::test_format[v2-test.parquet-parquet-kikimr_settings0] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.parquet-parquet] >> test_statistics.py::TestS3::test_egress[v1-client0-json_list] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[scan] >> test_statistics.py::TestS3::test_egress[v1-client0-json_each_row] >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-False-client0] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[scan] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value 
instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_ydb_sql.py::TestExecuteSqlFromStdinWithWideOutput::test_wide_table >> test_formats.py::TestS3Formats::test_format[v2-test.parquet-parquet-kikimr_settings0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-false-client0] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.gz-gzip] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client7-year Uint64 NOT NULL-False] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test.csv-csv_with_names-kikimr_settings0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-false] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-true-client0] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.lz4-lz4] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client8-year String NOT NULL-True] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-true] >> test_statistics.py::TestS3::test_egress[v1-client0-json_each_row] [GOOD] >> test_statistics.py::TestS3::test_egress[v1-client0-csv_with_names] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] >> test_push_down.py::TestS3PushDown::test_simple_case[v2-client0] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.parquet-parquet] [GOOD] >> test_s3_1.py::TestS3::test_write_result[v1-kikimr_params0-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.csv-csv_with_names] >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-false-client0] >> test_insert.py::TestS3::test_insert[v1-client0-json_each_row-dataset] [GOOD] >> test_statistics.py::TestS3::test_egress[v1-client0-csv_with_names] [GOOD] >> test_statistics.py::TestS3::test_egress[v1-client0-parquet] >> test_insert.py::TestS3::test_insert[v1-client0-json_each_row-dataにちは% set] >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-true-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client8-year String NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client9-year String-False] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v2-false-client0] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.lz4-lz4] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.br-brotli] >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings0-client0] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-true] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-false] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.br-brotli] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.bz2-bzip2] >> 
test_explicit_partitioning_0.py::TestS3::test_pruning[v2-false-client0] [GOOD] >> test_ydb_sql.py::TestExecuteSqlFromStdinWithWideOutput::test_wide_table [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test.csv-csv_with_names-kikimr_settings0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test.tsv-tsv_with_names-kikimr_settings0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_explicit_partitioning_0.py::TestS3::test_pruning[v2-true-client0] >> test_statistics.py::TestS3::test_egress[v1-client0-parquet] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_list] >> test_s3_0.py::TestS3::test_csv[v2-false-client0] >> test_insert.py::TestS3::test_insert[v1-client0-json_each_row-dataにちは% set] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-csv_with_names-dataset] |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_create_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.bz2-bzip2] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v2-true-client0] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.zst-zstd] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.csv-csv_with_names] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-false-client0] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.tsv-tsv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client9-year String-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client10-year Utf8-False] >> test_formats.py::TestS3Formats::test_format[v1-test.tsv-tsv_with_names-kikimr_settings0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test_each_row.json-json_each_row-kikimr_settings0] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.zst-zstd] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.xz-xz] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-false] [GOOD] |95.8%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/sqs/with_quotas/py3test >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-true] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test_each_row.json-json_each_row-kikimr_settings0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test_list.json-json_list-kikimr_settings0] >> test_insert.py::TestS3::test_insert[v1-client0-csv_with_names-dataset] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-csv_with_names-dataにちは% set] >> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.xz-xz] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.gz-gzip] |95.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-false-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-true-client0] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.tsv-tsv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client10-year Utf8-False] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.json-json_each_row] >> test_formats.py::TestS3Formats::test_format[v1-test_list.json-json_list-kikimr_settings0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test.parquet-parquet-kikimr_settings0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client11-year Utf8 NOT NULL-True] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-true] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-false] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v2-client0] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_list] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_each_row] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v1-client0] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.gz-gzip] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.lz4-lz4] >> test_formats.py::TestS3Formats::test_format[v1-test.parquet-parquet-kikimr_settings0] [GOOD] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.csv-csv_with_names] >> test_insert.py::TestS3::test_insert[v1-client0-csv_with_names-dataにちは% set] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-parquet-dataset] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client11-year Utf8 NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client12-year Date-False] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] |95.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-true-client0] [GOOD] >> 
test_explicit_partitioning_0.py::TestS3::test_validation[v2-client0] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.lz4-lz4] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.json-json_each_row] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.br-brotli] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-false] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.parquet-parquet] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-true] >> test_explicit_partitioning_0.py::TestS3::test_validation[v2-client0] [GOOD] >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-False-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_validation[v1-client0] >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-True-client0] |95.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_explicit_partitioning_0.py::TestS3::test_validation[v1-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-false-client0] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.br-brotli] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.bz2-bzip2] |95.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client12-year Date-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client13-year Date NOT NULL-True] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v1-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_pg_binding[v2-client0] >> test_insert.py::TestS3::test_insert[v1-client0-parquet-dataset] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-parquet-dataにちは% set] >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] [FAIL] >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-true] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-false] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.csv-csv_with_names] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_each_row] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-csv_with_names] >> test_early_finish.py::TestEarlyFinish::test_early_finish[v1-client0] [GOOD] >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-True-client0] [GOOD] >> test_push_down.py::TestS3PushDown::test_simple_case[v2-client0] [GOOD] >> test_yq_v2.py::TestS3::test_removed_database_path[v2-client0] >> test_push_down.py::TestS3PushDown::test_simple_case[v1-client0] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.bz2-bzip2] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.zst-zstd] >> 
test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client13-year Date NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client14-year Datetime-False] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-false-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_pg_binding[v2-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_pg_binding[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-true-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000770/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_public_metrics/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000770/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_public_metrics/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1395314) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1399389 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_insert.py::TestS3::test_insert[v1-client0-parquet-dataにちは% set] [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-false-client0] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-json_list-dataset] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-true-client0] |95.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_s3_0.py::TestS3::test_csv[v2-false-client0] [GOOD] >> test_s3_0.py::TestS3::test_csv[v2-true-client0] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.zst-zstd] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-false] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.xz-xz] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client14-year Datetime-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client15-year Datetime NOT NULL-True] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-true] >> test_bindings_1.py::TestBindings::test_pg_binding[v1-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.csv-csv_with_names] [GOOD] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-yql_syntax-client0] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.tsv-tsv_with_names] >> test_push_down.py::TestS3PushDown::test_simple_case[v1-client0] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-csv_with_names] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-parquet] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; 
use ast.Constant instead >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-true-client0] [GOOD] >> test_s3_0.py::TestS3::test_csv[v2-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-true-client0] [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-false-client0] >> test_s3_0.py::TestS3::test_csv[v1-false-client0] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-false-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client15-year Datetime NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client0-year Int32-False] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-yql_syntax-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-pg_syntax-client0] >> test_yq_v2.py::TestS3::test_removed_database_path[v2-client0] [GOOD] >> test_yq_v2.py::TestS3::test_query_parameters[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-true] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000752/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_test_connection/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000752/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_test_connection/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1403565) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1406212 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |95.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-false] >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings0-client0] [GOOD] >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings0-client0] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.xz-xz] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.gz-gzip] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1330108) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_yq_v2.py::TestS3::test_query_parameters[v2-client0] [GOOD] >> test_streaming_join.py::TestStreamingJoin::test_grace_join[v1-client0] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithPgSyntax::test_pg_syntax >> test_insert.py::TestS3::test_insert[v2-client0-json_list-dataset] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-json_list-dataにちは% set] |95.8%| [TA] $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} |95.9%| [TA] {RESULT} $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.json-json_each_row] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-false-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-true-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client0-year Int32-False] [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client1-year Int32 NOT NULL-False] >> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-false-client0] [GOOD] >> test_s3_0.py::TestS3::test_csv[v1-false-client0] [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-true-client0] >> test_s3_0.py::TestS3::test_csv[v1-true-client0] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-pg_syntax-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-yql_syntax-client0] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-parquet] [GOOD] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.csv-csv_with_names] [GOOD] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_list] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.tsv-tsv_with_names] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.gz-gzip] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.lz4-lz4] >> TStorageServiceTest::ShouldRegister >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-result_sets] >> test_disk.py::TestSafeDiskBreak::test_erase_method >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-std] [GOOD] >> TStorageServiceTest::ShouldRegister [GOOD] >> TStorageServiceTest::ShouldRegisterNextGeneration >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-yql_syntax-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-true-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client1-year Int32 NOT NULL-False] [GOOD] >> TStorageServiceTest::ShouldRegisterNextGeneration [GOOD] >> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-pg_syntax-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-false-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client2-year Uint32-False] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> 
test_s3_0.py::TestS3::test_csv[v1-true-client0] [GOOD] >> test_s3_0.py::TestS3::test_inference[v2-client0] >> test_ydb_sql.py::TestExecuteSqlWithPgSyntax::test_pg_syntax [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.json-json_each_row] [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-true-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.parquet-parquet] >> test_s3_1.py::TestS3::test_precompute[v2-false-client0] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.tsv-tsv_with_names] [GOOD] >> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint [GOOD] >> TStorageServiceTest::ShouldSaveState >> test_formats.py::TestS3Formats::test_format_inference[v2-test_each_row.json-json_each_row] |95.9%| [TA] $(B)/ydb/tests/functional/sqs/large/test-results/py3test/{meta.json ... results_accumulator.log} >> test_insert.py::TestS3::test_insert[v2-client0-json_list-dataにちは% set] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-json_each_row-dataset] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.lz4-lz4] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.br-brotli] >> test_s3_0.py::TestS3::test_inference[v2-client0] [GOOD] >> TStorageServiceTest::ShouldSaveState [GOOD] >> TStorageServiceTest::ShouldUseGc >> test_s3_0.py::TestS3::test_inference_null_column[v2-client0] |95.9%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/large/test-results/py3test/{meta.json ... results_accumulator.log} >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-pg_syntax-client0] [GOOD] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-false-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-true-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-result_sets] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_list] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client2-year Uint32-False] [GOOD] >> TStorageServiceTest::ShouldUseGc [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client3-year Uint32 NOT NULL-True] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-true-client0] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[scripting] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-false-client0] >> test_s3_0.py::TestS3::test_inference_null_column[v2-client0] [GOOD] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills >> test_s3_0.py::TestS3::test_inference_optional_types[v2-client0] >> test_formats.py::TestS3Formats::test_format_inference[v2-test_each_row.json-json_each_row] [GOOD] >> test_formats.py::TestS3Formats::test_format_inference[v2-test_list.json-json_list] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.br-brotli] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.bz2-bzip2] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.parquet-parquet] [GOOD] >> 
test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.csv-csv_with_names] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[json-additional_args4-column] [GOOD] >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client3-year Uint32 NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client4-year Int64-False] >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v1-client0] >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-plan] >> test_insert.py::TestS3::test_insert[v2-client0-json_each_row-dataset] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-json_each_row-dataにちは% set] >> TStateStorageTest::ShouldSaveGetOldSmallState ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldUseGc [GOOD] Test command err: 2025-05-05T03:22:16.394409Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7500796755413606251:2048] with connection to localhost:14437:local 2025-05-05T03:22:16.394466Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:22:16.543899Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:22:16.543917Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:22:16.897947Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7500796756327798312:2048] with connection to localhost:14437:local 2025-05-05T03:22:16.898034Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:22:16.935143Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:22:16.935162Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:22:16.935375Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-05-05T03:22:16.972702Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-05-05T03:22:16.972721Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-05-05T03:22:16.974457Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:22:16.992163Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldRegisterNextGeneration/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: RegisterCheck, code: 400130 2025-05-05T03:22:16.992182Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:22:17.433551Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7500796761612754124:2048] with connection to localhost:14437:local 2025-05-05T03:22:17.433602Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:22:17.468476Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:22:17.468499Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:22:17.468745Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T03:22:17.578855Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T03:22:17.578871Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T03:22:17.578999Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T03:22:17.630255Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-05-05T03:22:17.630308Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T03:22:17.630442Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-05-05T03:22:17.654587Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-05-05T03:22:17.654606Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-05-05T03:22:17.654741Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T03:22:17.676186Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-05-05T03:22:17.676200Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T03:22:17.676333Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-05-05T03:22:17.695278Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-05-05T03:22:17.695292Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-05-05T03:22:17.695449Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-05-05T03:22:17.724160Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-05-05T03:22:18.003735Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7500796759535741908:2048] with connection to localhost:14437:local 2025-05-05T03:22:18.003790Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:22:18.030227Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:22:18.030246Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 
2025-05-05T03:22:18.030395Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T03:22:18.139600Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T03:22:18.139621Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T03:22:18.139874Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2025-05-05T03:22:18.155828Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2025-05-05T03:22:18.155873Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2025-05-05T03:22:18.463952Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7500796766192766188:2048] with connection to localhost:14437:local 2025-05-05T03:22:18.463989Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [5:7500796766192766289:2130] 2025-05-05T03:22:18.463999Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:22:18.490171Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:22:18.490188Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:22:18.490329Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T03:22:18.591724Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T03:22:18.591758Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T03:22:18.591878Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T03:22:18.641228Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-05-05T03:22:18.641244Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T03:22:18.641369Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-05-05T03:22:18.661222Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'Completed' 2025-05-05T03:22:18.661255Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvNewCheckpointSucceeded 2025-05-05T03:22:18.661269Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-05-05T03:22:18.661300Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:1 for graph 'graph_graphich' 2025-05-05T03:22:18.661419Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-05-05T03:22:18.685492Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-05-05T03:22:18.685508Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-05-05T03:22:18.685665Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T03:22:18.707416Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-05-05T03:22:18.707438Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send 
TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T03:22:18.707661Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-05-05T03:22:18.726408Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-05-05T03:22:18.726428Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvNewCheckpointSucceeded 2025-05-05T03:22:18.726442Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-05-05T03:22:18.726479Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:2 for graph 'graph_graphich' 2025-05-05T03:22:18.726587Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2025-05-05T03:22:18.734171Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:2 2025-05-05T03:22:18.734524Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:1 2025-05-05T03:22:18.748284Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2025-05-05T03:22:18.748304Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2025-05-05T03:22:18.748454Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T03:22:18.769330Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'PendingCommit' 2025-05-05T03:22:18.769361Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T03:22:18.769518Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCompleteCheckpointRequest 2025-05-05T03:22:18.789366Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'Completed' 2025-05-05T03:22:18.789383Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvNewCheckpointSucceeded 2025-05-05T03:22:18.789395Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCompleteCheckpointResponse 2025-05-05T03:22:18.789429Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:3 for graph 'graph_graphich' 2025-05-05T03:22:18.789543Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-05-05T03:22:18.797609Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:3 2025-05-05T03:22:18.817383Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-05-05T03:22:18.917768Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-05-05T03:22:18.920754Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-05-05T03:22:19.021116Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-05-05T03:22:19.023970Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-false-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-true-client0] >> TStateStorageTest::ShouldSaveGetOldSmallState [GOOD] >> TStateStorageTest::ShouldSaveGetOldBigState >> TStateStorageTest::ShouldSaveGetOldBigState [GOOD] >> 
TStateStorageTest::ShouldSaveGetIncrementSmallState >> test_s3_0.py::TestS3::test_inference_optional_types[v2-client0] [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementSmallState [GOOD] >> test_s3_0.py::TestS3::test_inference_multiple_files[v2-client0] >> test_formats.py::TestS3Formats::test_format_inference[v2-test_list.json-json_list] [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementBigState >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client4-year Int64-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.csv-csv_with_names] [GOOD] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.parquet-parquet] >> TStateStorageTest::ShouldSaveGetIncrementBigState [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendState >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client5-year Int64 NOT NULL-False] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.tsv-tsv_with_names] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_each_row] [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-result_sets] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[stream] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-csv_with_names] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.bz2-bzip2] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-true-client0] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[scripting] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.zst-zstd] >> test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v2-client0] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[stream] >> test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v2-client0] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[scripting] >> test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v1-client0] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[stream] >> test_insert.py::TestS3::test_insert[v2-client0-json_each_row-dataにちは% set] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[scripting] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-csv_with_names-dataset] >> 
test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v1-client0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[stream] >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v1-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-false-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.json-json_each_row] >> test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v2-client0] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[scripting] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-false] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[stream] >> test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v2-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v1-client0] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-true] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-plan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[scripting] >> test_stream_query.py::TestStreamQuery::test_sql_suite[plan-window.test] >> test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v1-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v2-client0] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[stream] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-plan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[stream] >> test_s3_0.py::TestS3::test_inference_multiple_files[v2-client0] [GOOD] >> test_s3_0.py::TestS3::test_inference_file_error[v2-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.json-json_each_row] [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged >> test_formats.py::TestS3Formats::test_format_inference[v2-test.parquet-parquet] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[stream] [GOOD] >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[scripting] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.parquet-parquet] >> test_formats.py::TestS3Formats::test_btc[v2] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[stream] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.zst-zstd] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[scripting] |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.xz-xz] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client5-year Int64 NOT NULL-False] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[stream] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client6-year Uint64-False] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[scripting] >> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-result_sets] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[scripting] >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v2-client0] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-csv_with_names-dataset] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-csv_with_names-dataにちは% set] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-false-client0] [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-result_sets] >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-true-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.parquet-parquet] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[scripting] [GOOD] >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[stream] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.csv-csv_with_names] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[scripting] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-true] [GOOD] >> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings0-client0] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[stream] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-false] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-csv_with_names] [GOOD] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings0-client0] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-parquet] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[stream] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[scripting] |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> TLocksTest::Range_BrokenLock0 >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[stream] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-false] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-plan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[scripting] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-true] |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[stream] >> 
test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.xz-xz] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client6-year Uint64-False] [GOOD] >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v1-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_decimal_binding[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client7-year Uint64 NOT NULL-False] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.gz-gzip] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[scripting] >> test_s3_0.py::TestS3::test_inference_file_error[v2-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-plan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[stream] >> test_s3_0.py::TestS3::test_inference_parameters[v2-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.csv-csv_with_names] [GOOD] >> test_formats.py::TestS3Formats::test_btc[v2] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.tsv-tsv_with_names] >> test_formats.py::TestS3Formats::test_btc[v1] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[stream] >> test_insert.py::TestS3::test_insert[v2-client0-csv_with_names-dataにちは% set] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[data] >> test_insert.py::TestS3::test_insert[v2-client0-parquet-dataset] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-true] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-true-client0] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[stream] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-false] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-false-client0] |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] |96.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[stream] [GOOD] >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[scripting] >> test_ydb_backup.py::TestRecursiveNonConsistent::test_recursive_table_backup_from_different_places >> TFlatTest::CopyTableAndReturnPartAfterCompaction >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[stream] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-result_sets] >> TObjectStorageListingTest::CornerCases >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[scripting] >> test_bindings_1.py::TestBindings::test_decimal_binding[v2-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-result_sets] >> test_bindings_1.py::TestBindings::test_decimal_binding[v1-client0] >> test_s3_0.py::TestS3::test_inference_parameters[v2-client0] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[stream] >> test_s3_0.py::TestS3::test_inference_timestamp[v2-client0] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[scripting] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client7-year Uint64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client8-year String NOT NULL-True] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.gz-gzip] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.lz4-lz4] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD] Test command err: 2025-05-05T03:22:24.100926Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7500796784305634820:2048] with connection to localhost:10758:local 2025-05-05T03:22:24.100983Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:22:24.289981Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:22:24.290019Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:22:24.290175Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T03:22:24.401702Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T03:22:24.401723Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T03:22:24.401880Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-05-05T03:22:24.426574Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-05-05T03:22:24.426600Z node 1 :STREAMS_STORAGE_SERVICE 
DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-05-05T03:22:24.426753Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-05-05T03:22:24.444803Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:2] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointAfterGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-05-05T03:22:24.444831Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-05-05T03:22:24.674457Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7500796789153909452:2048] with connection to localhost:10758:local 2025-05-05T03:22:24.674512Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:22:24.704220Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:22:24.704238Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:22:24.704375Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-05-05T03:22:24.739460Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-05-05T03:22:24.739480Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-05-05T03:22:24.939714Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7500796790819903687:2048] with connection to localhost:10758:local 2025-05-05T03:22:24.939781Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:22:24.967524Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:22:24.967544Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:22:24.967700Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest 2025-05-05T03:22:25.005162Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to abort checkpoint:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-05-05T03:22:25.005183Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse 2025-05-05T03:22:25.183674Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7500796793333443203:2048] with connection to localhost:10758:local 2025-05-05T03:22:25.183726Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:22:25.213357Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:22:25.213376Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:22:25.213542Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T03:22:25.317218Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T03:22:25.317237Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T03:22:25.317371Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-05-05T03:22:25.349154Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Selected checkpoint '17:1' with status Pending, while expected PendingCommit, code: 400080 2025-05-05T03:22:25.349172Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-05-05T03:22:25.643860Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7500796796037764462:2048] with connection to localhost:10758:local 2025-05-05T03:22:25.643912Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T03:22:25.670822Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T03:22:25.670842Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T03:22:25.670994Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T03:22:25.774544Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T03:22:25.774580Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T03:22:25.774725Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T03:22:25.830005Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-05-05T03:22:25.830028Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T03:22:25.830385Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-05-05T03:22:25.861156Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-05-05T03:22:25.861178Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-05-05T03:22:25.861373Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-05-05T03:22:25.879196Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-05-05T03:22:25.879216Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse >> TFlatTest::CopyTableAndReturnPartAfterCompaction [GOOD] >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-parquet] [GOOD] >> TObjectStorageListingTest::CornerCases [GOOD] >> TObjectStorageListingTest::Decimal >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[stream] >> TLocksTest::Range_BrokenLock0 [GOOD] >> TLocksTest::Range_BrokenLock1 >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_list] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-false-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-true-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.json-json_each_row] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[scripting] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-false] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-parquet-dataset] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[stream] >> TObjectStorageListingTest::Decimal [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-true] >> test_insert.py::TestS3::test_insert[v2-client0-parquet-dataにちは% set] |96.0%| [TA] $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_formats.py::TestS3Formats::test_btc[v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_early_finish.py::TestEarlyFinish::test_early_finish[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00074a/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_early_finish/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00074a/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_early_finish/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1411234) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1413622 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |96.0%| [TA] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[scripting] >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] >> test_formats.py::TestS3Formats::test_invalid_format[v2-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-plan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[stream] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_yq_v2.py::TestS3::test_query_parameters[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000739/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_yq_v2/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000739/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_yq_v2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1415970) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1417899 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client8-year String NOT NULL-True] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[scripting] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Decimal [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000bf1/r3tmp/tmpWh7XQc/pdisk_1.dat 2025-05-05T03:22:27.618400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:22:27.674524Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31145, node 1 2025-05-05T03:22:27.698452Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T03:22:27.698465Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T03:22:27.698467Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T03:22:27.698509Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-05T03:22:27.714096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:27.714128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:27.716846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14918 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:27.737673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:22:27.741508Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:27.748777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:28.315045Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796806215672561:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:28.315064Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000bf1/r3tmp/tmpw1zgAk/pdisk_1.dat 2025-05-05T03:22:28.329979Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21269, node 2 2025-05-05T03:22:28.377853Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T03:22:28.377868Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T03:22:28.377870Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T03:22:28.377914Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26273 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:28.423732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:28.423762Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:28.424122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:28.427218Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:28.430632Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:28.438892Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:22:28.441460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[data] [GOOD] >> test_bindings_1.py::TestBindings::test_decimal_binding[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client9-year String-False] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.lz4-lz4] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[scan] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.br-brotli] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[stream] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[data] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] Test command err: 2025-05-05T03:22:27.189705Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796804891811700:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:27.190180Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000354/r3tmp/tmpyT7RZg/pdisk_1.dat 2025-05-05T03:22:27.269617Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17387 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-05-05T03:22:27.292074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:27.292110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:27.293043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:22:27.335190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:27.353143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:27.386561Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.004s,wait=0.003s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T03:22:27.387926Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T03:22:27.396642Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T03:22:27.398304Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746415347411 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) Copy TableOld to Table 2025-05-05T03:22:27.429469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 100000 InMemStepsToSnapshot: 2 InMemForceStepsToSnapshot: 3 InMemForceSizeToSnapshot: 1000000 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 200000 ReadAheadLoThreshold: 100000 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 10000 CountToCompact: 2 ForceCountToCompact: 2 ForceSizeToCompact: 20000 CompactionBrokerQueue: 1 KeepInCache: true } } ColumnFamilies { Id: 0 ColumnCache: ColumnCacheNone Storage: ColumnStorageTest_1_2_1k } } CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976715676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-05T03:22:27.429687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-05T03:22:27.429817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-05T03:22:27.429829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-05-05T03:22:27.429831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T03:22:27.429836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-05T03:22:27.429839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-05T03:22:27.429872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-05-05T03:22:27.429893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-05T03:22:27.430142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-05T03:22:27.430150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-05-05T03:22:27.430368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715676, response: Status: StatusAccepted TxId: 281474976715676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-05-05T03:22:27.430392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-05-05T03:22:27.430463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-05T03:22:27.430466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 
2025-05-05T03:22:27.430497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-05-05T03:22:27.430509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-05T03:22:27.430512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500796804891812188:2237], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 2 2025-05-05T03:22:27.430516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500796804891812188:2237], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 4 2025-05-05T03:22:27.430522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-05T03:22:27.430528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-05-05T03:22:27.430598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T03:22:27.430613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T03:22:27.430924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T03:22:27.430934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T03:22:27.430936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2025-05-05T03:22:27.430939Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-05-05T03:22:27.430942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-05-05T03:22:27.430988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T03:22:27.430993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 
Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T03:22:27.430995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2025-05-05T03:22:27.430996Z node 1 :FLAT_TX_SCHEMESHARD INFO ... DEBUG: 72075186224037889 ack parts [ [72075186224037889:1:16:1:12288:306:0] [72075186224037889:1:23:1:12288:253:0] ] return to tablet 72075186224037891 2025-05-05T03:22:28.900250Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715784 datashard 72075186224037891 state PreOffline 2025-05-05T03:22:28.900257Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2025-05-05T03:22:28.900258Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T03:22:28.900274Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-05-05T03:22:28.900321Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715784 datashard 72075186224037890 state PreOffline 2025-05-05T03:22:28.900324Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-05-05T03:22:28.900338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-05-05T03:22:28.900364Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715784:0 progress is 1/1 2025-05-05T03:22:28.900366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-05-05T03:22:28.900368Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T03:22:28.900368Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715784:0 progress is 1/1 2025-05-05T03:22:28.900370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-05-05T03:22:28.900375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715784, ready parts: 1/1, is published: true 2025-05-05T03:22:28.900382Z node 2 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-05-05T03:22:28.900384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7500796807814926313:2697] message: TxId: 281474976715784 2025-05-05T03:22:28.900386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-05-05T03:22:28.900389Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715784:0 2025-05-05T03:22:28.900391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715784:0 2025-05-05T03:22:28.900411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-05-05T03:22:28.900603Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037889:1:16:1:12288:306:0] [72075186224037889:1:23:1:12288:253:0] ] return ack processed 2025-05-05T03:22:28.900654Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to 
schemeshard 72057594046644480 2025-05-05T03:22:28.900668Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7500796807814926339:2700], serverId# [2:7500796807814926342:3430], sessionId# [0:0:0] 2025-05-05T03:22:28.900674Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T03:22:28.900930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796807814924899 RawX2: 4503608217307443 } TabletId: 72075186224037891 State: 4 2025-05-05T03:22:28.900944Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:22:28.900993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796807814924602 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 2025-05-05T03:22:28.900996Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:22:28.901058Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-05-05T03:22:28.901062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:22:28.901076Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-05-05T03:22:28.901077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:22:28.901435Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T03:22:28.901454Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-05-05T03:22:28.901732Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T03:22:28.901798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796807814924891 RawX2: 4503608217307442 } TabletId: 72075186224037890 State: 4 2025-05-05T03:22:28.901810Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:22:28.901866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:22:28.901867Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-05T03:22:28.902011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-05T03:22:28.902065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-05T03:22:28.902105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-05T03:22:28.902126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T03:22:28.902149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T03:22:28.902155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T03:22:28.902165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-05T03:22:28.902222Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-05-05T03:22:28.902281Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-05-05T03:22:28.902319Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-05-05T03:22:28.902345Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop Check that tablet 72075186224037888 was deleted 2025-05-05T03:22:28.902499Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-05T03:22:28.902510Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-05-05T03:22:28.902527Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-05-05T03:22:28.902551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-05T03:22:28.902555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-05T03:22:28.902561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T03:22:28.902562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-05T03:22:28.902569Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T03:22:28.902577Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-05T03:22:28.902730Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-05-05T03:22:28.902986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T03:22:28.903074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-05T03:22:28.903115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T03:22:28.903122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-05T03:22:28.903129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 
2025-05-05T03:22:28.903151Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-05T03:22:28.903169Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7500796807814925077:2621], serverId# [2:7500796807814925078:2622], sessionId# [0:0:0] 2025-05-05T03:22:28.903216Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T03:22:28.903237Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-05-05T03:22:28.903299Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-05T03:22:28.903347Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T03:22:28.903351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T03:22:28.903360Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T03:22:28.903401Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-05-05T03:22:28.904001Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-05-05T03:22:28.905160Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[scan] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.json-json_each_row] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[stream] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.parquet-parquet] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[data] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-true-client0] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[scan] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client0-year Int32 NOT NULL-True] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[data] |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_sql.py::TestExecuteSqlWithPgSyntax::test_pg_syntax [GOOD] >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[scripting] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-true] [GOOD] |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[scan] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-false] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-result_sets] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[stream] >> TLocksTest::Range_BrokenLock1 [GOOD] >> TLocksTest::Range_Pinhole >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[data] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-result_sets] >> test_stream_query.py::TestStreamQuery::test_sql_suite[results-window.test] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[stream] >> test_ydb_backup.py::TestRecursiveNonConsistent::test_recursive_table_backup_from_different_places [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[scan] |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_formats.py::TestS3Formats::test_invalid_format[v2-client0] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[scripting] >> test_formats.py::TestS3Formats::test_invalid_format[v1-client0] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.br-brotli] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-parquet-dataにちは% set] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client0-year Int32 NOT NULL-True] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.bz2-bzip2] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> 
test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[data] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client9-year String-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client1-year Uint32 NOT NULL-True] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_list] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[stream] >> test_insert.py::TestS3::test_big_json_list_insert[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client10-year Utf8-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock1 [GOOD] Test command err: 2025-05-05T03:22:25.686872Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796793602996970:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:25.687122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002cc/r3tmp/tmpVzhA7O/pdisk_1.dat 2025-05-05T03:22:25.746628Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14585 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:25.780476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:25.788186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:22:25.820587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:25.820638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:25.821733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:25.851473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:25.864948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:26.197628Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796798030213093:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:26.197904Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002cc/r3tmp/tmpiQA72A/pdisk_1.dat 2025-05-05T03:22:26.214470Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11278 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:26.302776Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:26.302809Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:26.303333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:26.303587Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T03:22:26.314438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:22:26.329169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:26.343310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:26.703668Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500796798122224751:2200];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002cc/r3tmp/tmpPF9AiC/pdisk_1.dat 2025-05-05T03:22:26.710558Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:22:26.719203Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16024 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:26.807515Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:26.807545Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:26.807989Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:26.808608Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:26.814637Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:26.818650Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T03:22:26.834807Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T03:22:26.846905Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:27.275229Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500796803970962002:2187];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002cc/r3tmp/tmpx3OHCT/pdisk_1.dat 2025-05-05T03:22:27.288344Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:22:27.327168Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9714 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:22:27.375054Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:27.375098Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:27.375415Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:27.376253Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:27.377556Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, u ... 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-05-05T03:22:29.054994Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:29.055037Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:29.055455Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:29.055991Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:29.062188Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:29.073053Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:29.092591Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T03:22:29.105581Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002cc/r3tmp/tmppdhlfV/pdisk_1.dat 2025-05-05T03:22:29.467023Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500796812159087934:2221];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:29.485182Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:22:29.493233Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25922 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:22:29.570041Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:29.570076Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:29.570554Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:29.571136Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:29.577892Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:29.582571Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:29.598912Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:29.619837Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:22:30.115742Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500796814781597829:2217];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:30.117170Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002cc/r3tmp/tmp18mMNZ/pdisk_1.dat 2025-05-05T03:22:30.144834Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10195 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:22:30.221490Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:30.221540Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:30.221951Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:30.224349Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:30.224406Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:30.233536Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:30.254899Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:30.268959Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002cc/r3tmp/tmp9RvtY4/pdisk_1.dat 2025-05-05T03:22:30.660194Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500796817251241084:2218];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:30.661600Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:22:30.695796Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5375 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:22:30.763712Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:30.763755Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:30.764194Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:30.765828Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:30.773755Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:30.793847Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:30.810426Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[scan] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_each_row] >> test_s3_0.py::TestS3::test_inference_timestamp[v2-client0] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[scripting] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_push_down.py::TestS3PushDown::test_simple_case[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00072e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_push_down/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00072e/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_push_down/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1417623) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1419780 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_s3_0.py::TestS3::test_inference_projection[v2-client0] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[data] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_streaming_join.py::TestStreamingJoin::test_grace_join[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/0007bd/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_streaming_join/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/0007bd/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_streaming_join/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1386026) is multi-threaded, use of fork() may lead to deadlocks in the child. 
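Editor's note: the two moto_server "Test command err" blocks above repeat one pattern — the recipe opens the *.out.log / *.err.log files, hands them to subprocess.Popen, and never closes or waits on them, so CPython emits ResourceWarning (and suggests enabling tracemalloc for the allocation traceback), plus a DeprecationWarning about fork() in a multi-threaded parent. A minimal sketch of how such a launcher could avoid those warnings; the function name and arguments are illustrative assumptions, not the actual code of library/recipes/common/__init__.py:

```python
import subprocess
import tracemalloc


def start_moto_server(cmd, out_path, err_path):
    # Enabling tracemalloc (or running under PYTHONTRACEMALLOC=1) makes the
    # ResourceWarning include the allocation traceback the log hint refers to.
    tracemalloc.start()

    # Closing the parent-side file objects avoids the "unclosed file" warnings;
    # the child keeps its own duplicated descriptors, so this is safe.
    with open(out_path, "w", encoding="utf-8") as out, \
         open(err_path, "w", encoding="utf-8") as err:
        process = subprocess.Popen(cmd, stdout=out, stderr=err)
        # Calling process.wait() (or terminate() followed by wait()) before the
        # Popen object is garbage-collected clears the
        # "subprocess ... is still running" warning at interpreter exit.
        return process
```

The fork()-related DeprecationWarning comes from the default multiprocessing start method on Linux; in a multi-threaded parent the usual remedy is multiprocessing.set_start_method("spawn") or multiprocessing.get_context("spawn"), at the cost of slower child start-up. This is a sketch of the generic fix, not a claim about how the recipe should be changed.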
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1387945 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.csv-csv_with_names] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[scripting] |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[stream] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-false] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[stream] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-true] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[scripting] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-plan] |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[scan] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client1-year Uint32 NOT NULL-True] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[scan] [GOOD] >> 
test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[stream] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client2-year Uint64 NOT NULL-True] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[stream] [GOOD] >> test_s3_0.py::TestS3::test_inference_projection[v2-client0] [GOOD] >> TLocksTest::Range_Pinhole [GOOD] >> TLocksTest::SetBreakSetEraseBreak >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings0-client0] [SKIPPED] >> test_formats.py::TestS3Formats::test_invalid_format[v1-client0] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_input_compression[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client10-year Utf8-False] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.bz2-bzip2] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[data] |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings0-client0] >> test_s3_0.py::TestS3::test_inference_null_column_name[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client11-year Utf8 NOT NULL-True] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.zst-zstd] >> TObjectStorageListingTest::MaxKeysAndSharding >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[scan] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings0-client0] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-true] [GOOD] >> TFlatTest::SelectRangeBytesLimit >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings1-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.csv-csv_with_names] [GOOD] |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-false] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.tsv-tsv_with_names] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[scan] 
[GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[data] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client2-year Uint64 NOT NULL-True] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[scan] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client3-year Date NOT NULL-False] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join0.test] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-result_sets] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[data] >> TLocksTest::SetBreakSetEraseBreak [GOOD] >> test_s3_1.py::TestS3::test_precompute[v2-false-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-result_sets] >> TFlatTest::SelectRangeBytesLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs1 >> test_s3_1.py::TestS3::test_precompute[v2-true-client0] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[scan] [GOOD] |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[data] |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params0] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[scan] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client11-year Utf8 NOT NULL-True] [GOOD] >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] >> test_insert.py::TestS3::test_big_json_list_insert[v1-client0] [GOOD] >> test_s3_0.py::TestS3::test_inference_null_column_name[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client12-year Date-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::SetBreakSetEraseBreak [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000be2/r3tmp/tmpFEohj7/pdisk_1.dat 2025-05-05T03:22:31.354294Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796818485943275:2220];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:31.354491Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:22:31.411041Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected 
to server localhost:65432 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:31.458725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:31.458755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:31.459992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:31.464614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:31.470332Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:31.490327Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:22:31.491842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:31.571817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:31.634545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:22:31.984632Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796819506463793:2058];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:31.984653Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000be2/r3tmp/tmpapcsHu/pdisk_1.dat 2025-05-05T03:22:32.009020Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26427 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:32.094645Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:32.094678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-05T03:22:32.095085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:32.102770Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:32.103213Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:32.108658Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:22:32.109481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:32.134932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:32.146212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000be2/r3tmp/tmpbis3eK/pdisk_1.dat 2025-05-05T03:22:32.562318Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:22:32.562692Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23610 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:22:32.642886Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:32.642919Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:32.643127Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:32.643873Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:32.654995Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:32.656423Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:32.730135Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:32.763915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:22:33.037748Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500796827632497909:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000be2/r3tmp/tmpEJSpf8/pdisk_1.dat 2025-05-05T03:22:33.041429Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:22:33.053799Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:31476 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:33.141516Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:33.141549Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:33.141974Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:33.142539Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:33.146705Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:33.154592Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:22:33.159317Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:33.195334Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:33.227051Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:33.530389Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7500796830076207448:2091];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:33.530676Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000be2/r3tmp/tmpZVfQOW/pdisk_1.dat 2025-05-05T03:22:33.550890Z node 5 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11256 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:33.625409Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected waiting... 2025-05-05T03:22:33.625441Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:33.625826Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:33.626540Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:33.627543Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:33.636426Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:33.650815Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:33.670449Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:34.058854Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7500796830927824001:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:34.058874Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000be2/r3tmp/tmp8aNIIQ/pdisk_1.dat 2025-05-05T03:22:34.091999Z node 6 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19065 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:34.166077Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:34.166111Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:34.166625Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:34.170511Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:22:34.170657Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:34.184365Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:34.199546Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:34.218026Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:34.557286Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500796833224523705:2142];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:34.559393Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000be2/r3tmp/tmpBmSR65/pdisk_1.dat 2025-05-05T03:22:34.584940Z node 7 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21759 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:34.658101Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:34.658133Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-05T03:22:34.658677Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:34.659123Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:34.666379Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:22:34.670977Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:34.689694Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:34.700353Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
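Editor's note: the sqs with_quotas py3test entry a few log lines above flags ydb/tests/library/sqs/requests_client.py:140 for calling logger.warn, which the standard library deprecates in favour of logger.warning. A minimal sketch of the substitution the warning suggests; the wrapper function below is a hypothetical stand-in for the real call site:

```python
import logging

logger = logging.getLogger("sqs.requests_client")


def log_failed_request(code, reason, text):
    # logger.warn(...) is a deprecated alias and triggers the DeprecationWarning
    # seen in the test output; logger.warning(...) is the supported spelling.
    # Lazy %-style arguments also defer string formatting until the record is emitted.
    logger.warning(
        "Last request failed with code %s, reason '%s' and text '%s'",
        code, reason, text,
    )
```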
>> test_inflight.py::TestS3::test_data_inflight[v1-client0-kikimr_params0] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[data] >> test_insert.py::TestS3::test_big_json_list_insert[v2-client0] >> test_s3_0.py::TestS3::test_inference_unsupported_types[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-false] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.zst-zstd] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-true] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[scan] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-13.test] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.xz-xz] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.json-json_each_row] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_each_row] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[data] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-csv_with_names] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[scan] |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[scan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-plan] >> test_formats.py::TestS3Formats::test_invalid_input_compression[v2-client0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] Test command err: 2025-05-05T03:22:34.539752Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796830939258101:2140];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:34.540729Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000bd5/r3tmp/tmpE6Olgo/pdisk_1.dat 2025-05-05T03:22:34.640842Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13132 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:34.665947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:34.668205Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:34.680890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:34.714385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:34.714419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:34.718595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:35.062863Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796836680653513:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:35.063276Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000bd5/r3tmp/tmpUKEC8J/pdisk_1.dat 2025-05-05T03:22:35.091003Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27761 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:22:35.162931Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:35.162979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:35.164038Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:35.168925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:35.178527Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:35.190663Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:22:35.195112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[scan] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_input_compression[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client3-year Date NOT NULL-False] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[data] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client4-year String NOT NULL-True] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[scan] >> TFlatTest::CopyTableAndCompareColumnsSchema >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[data] >> test_s3_1.py::TestS3::test_precompute[v2-true-client0] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[scan] >> test_s3_1.py::TestS3::test_precompute[v1-false-client0] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[data] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-6.test] |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[scan] [GOOD] >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client4-year String NOT NULL-True] [GOOD] >> 
test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-true] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[data] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client5-year String-False] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-false] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[scan] >> test_s3_0.py::TestS3::test_inference_unsupported_types[v2-client0] [GOOD] |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_s3_0.py::TestS3::test_json_list_formats[v2-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-result_sets] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.xz-xz] [GOOD] >> test_compressions.py::TestS3Compressions::test_invalid_compression[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client12-year Date-False] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[data] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client13-year Date NOT NULL-True] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.json-json_each_row] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[scan] |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.parquet-parquet] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-result_sets] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[scan] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success >> test_insert.py::TestS3::test_big_json_list_insert[v2-client0] [GOOD] >> test_insert.py::TestS3::test_insert_csv_delimiter[v1-client0] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[data] |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_formats.py::TestS3Formats::test_invalid_input_compression[v1-client0] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[data] [GOOD] >> 
test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[scan] >> test_formats.py::TestS3Formats::test_invalid_output_compression[v2-client0] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-csv_with_names] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[data] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-parquet] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client13-year Date NOT NULL-True] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[scan] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client14-year Datetime-False] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[data] |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[scan] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client5-year String-False] [GOOD] >> test_s3_1.py::TestS3::test_precompute[v1-false-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client6-year Utf8 NOT NULL-True] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-plan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[data] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.parquet-parquet] [GOOD] >> test_s3_1.py::TestS3::test_precompute[v1-true-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.csv-csv_with_names] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[scan] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-false] [GOOD] >> TLocksTest::NoLocksSet >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[data] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-true] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[scan] >> 
test_s3_0.py::TestS3::test_json_list_formats[v2-client0] [GOOD] >> test_s3_0.py::TestS3::test_csv_with_hopping[v2-client0] >> test_compressions.py::TestS3Compressions::test_invalid_compression[v2-client0] [GOOD] |96.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |96.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |96.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[data] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_compressions.py::TestS3Compressions::test_invalid_compression[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client14-year Datetime-False] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[scan] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client15-year Datetime NOT NULL-True] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[data] |96.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-result_sets] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[scan] |96.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_insert.py::TestS3::test_insert_csv_delimiter[v1-client0] [GOOD] >> test_insert.py::TestS3::test_insert_csv_delimiter[v2-client0] >> TFlatTest::LargeProxyReply >> TFlatTest::CopyTableAndCompareColumnsSchema [GOOD] >> TFlatTest::CopyTableAndDropCopy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-result_sets] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[data] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.csv-csv_with_names] [GOOD] >> test_s3_0.py::TestS3::test_csv_with_hopping[v2-client0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.tsv-tsv_with_names] >> test_s3_1.py::TestS3::test_precompute[v1-true-client0] [GOOD] >> test_s3_1.py::TestS3::test_failed_precompute[v2-false-client0] >> test_s3_0.py::TestS3::test_csv_with_hopping[v1-client0] >> test_formats.py::TestS3Formats::test_invalid_output_compression[v2-client0] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[scan] >> test_formats.py::TestS3Formats::test_invalid_output_compression[v1-client0] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[scan] [GOOD] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] >> TFlatTest::CopyTableAndDropCopy [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client6-year Utf8 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client7-year Utf8-False] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> TFlatTest::SelectRangeReverse >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-true] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client15-year 
Datetime NOT NULL-True] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-false] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client0-year Int32-False] >> test_validation.py::TestS3::test_empty[v1-client0] [GOOD] >> test_validation.py::TestS3::test_empty[v2-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-plan] >> test_compressions.py::TestS3Compressions::test_invalid_compression[v1-client0] [GOOD] >> TFlatTest::SelectRangeReverse [GOOD] >> TFlatTest::SelectRangeReverseExcludeKeys >> test_compressions.py::TestS3Compressions::test_invalid_compression_inference[v2-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropCopy [GOOD] Test command err: 2025-05-05T03:22:37.074385Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796844662502535:2090];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:37.074673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000bbf/r3tmp/tmpmWjIqg/pdisk_1.dat 2025-05-05T03:22:37.162492Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23440 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:37.207044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:37.207079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:37.207824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T03:22:37.211189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:37.211652Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:22:37.219784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746415357281 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_1_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1_Copy" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1746415357302 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot... (TRUNCATED) waiting... 2025-05-05T03:22:37.261153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/Table_2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715661 CreateStep: 1746415357316 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" ... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_2_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2_Copy" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715662 CreateStep: 1746415357344 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: ... (TRUNCATED) waiting... 2025-05-05T03:22:37.310867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/Table_3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715663 CreateStep: 1746415357365 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_3_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3_Copy" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715664 CreateStep: 1746415357379 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { ... (TRUNCATED) 2025-05-05T03:22:37.334764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 waiting... waiting... 
TClient::Ls request: /dc-1/Dir/Table_4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715665 CreateStep: 1746415357393 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_4" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_4_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4_Copy" PathId: 10 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715666 CreateStep: 1746415357407 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_4_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildIn ... r::TEvDataShard::TEvProposeTransactionResult> complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-05-05T03:22:41.762036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-05-05T03:22:41.762043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-05-05T03:22:41.762064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-05-05T03:22:41.762068Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715686:0 ProgressState 2025-05-05T03:22:41.762077Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-05-05T03:22:41.762080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-05-05T03:22:41.762079Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037895 state Ready 2025-05-05T03:22:41.762083Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-05-05T03:22:41.762084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-05-05T03:22:41.762085Z node 2 :TX_DATASHARD DEBUG: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2025-05-05T03:22:41.762087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715686, ready parts: 1/1, is published: true 2025-05-05T03:22:41.762096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7500796862816172468:2402] message: TxId: 281474976715686 2025-05-05T03:22:41.762100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 
2025-05-05T03:22:41.762103Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715686:0 2025-05-05T03:22:41.762105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715686:0 2025-05-05T03:22:41.762105Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037894 state Ready 2025-05-05T03:22:41.762108Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-05-05T03:22:41.762131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 4 2025-05-05T03:22:41.763127Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:7500796862816172577:3008], serverId# [2:7500796862816172578:3009], sessionId# [0:0:0] 2025-05-05T03:22:41.763158Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-05T03:22:41.763553Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T03:22:41.763578Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T03:22:41.764383Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037895, clientId# [2:7500796862816172589:3017], serverId# [2:7500796862816172590:3018], sessionId# [0:0:0] 2025-05-05T03:22:41.764409Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T03:22:41.764690Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T03:22:41.764711Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-05T03:22:41.765316Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-05T03:22:41.765561Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T03:22:41.765571Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T03:22:41.766339Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T03:22:41.767190Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T03:22:41.767218Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-05T03:22:41.767934Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-05T03:22:41.768320Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T03:22:41.768334Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T03:22:41.768732Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1281 647 2154)b }, ecr=1.000 2025-05-05T03:22:41.768886Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-05T03:22:41.768915Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-05-05T03:22:41.769124Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T03:22:41.769466Z node 2 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T03:22:41.769480Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-05T03:22:41.769657Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1155 521 2626)b }, ecr=1.000 2025-05-05T03:22:41.769828Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-05T03:22:41.769841Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-05-05T03:22:41.770389Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-05T03:22:41.770679Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T03:22:41.770701Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T03:22:41.771310Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T03:22:41.771625Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T03:22:41.771661Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-05T03:22:41.772665Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-05T03:22:41.772965Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T03:22:41.772982Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T03:22:41.773745Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T03:22:41.774115Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T03:22:41.774138Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-05T03:22:41.775097Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-05T03:22:41.775413Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T03:22:41.775443Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T03:22:41.776264Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T03:22:41.776429Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1637 647 6413)b }, ecr=1.000 2025-05-05T03:22:41.776637Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-05T03:22:41.776650Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-05-05T03:22:41.776770Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T03:22:41.776806Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-05T03:22:41.777174Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2390 1432 5183)b }, 
ecr=1.000 2025-05-05T03:22:41.777343Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-05T03:22:41.777354Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-05-05T03:22:41.777634Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-05T03:22:41.779017Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T03:22:41.779047Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T03:22:41.779800Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T03:22:41.781071Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T03:22:41.781106Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-05T03:22:41.782093Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 Check that tablet 72075186224037892 was deleted Check that tablet 72075186224037893 was deleted Check that tablet 72075186224037888 was deleted Check that tablet 72075186224037889 was deleted Check that tablet 72075186224037890 was deleted Check that tablet 72075186224037891 was deleted 2025-05-05T03:22:41.783133Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T03:22:41.783163Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T03:22:41.784145Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T03:22:41.785100Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T03:22:41.785122Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-05T03:22:41.785400Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) 2025-05-05T03:22:41.785520Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) 2025-05-05T03:22:41.785616Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-05-05T03:22:41.785771Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) 2025-05-05T03:22:41.785898Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) 2025-05-05T03:22:41.785962Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TLocksTest::NoLocksSet [GOOD] >> TLocksTest::MultipleLocks >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-plan] >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.json-json_each_row] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-parquet] [GOOD] >> 
TFlatTest::LargeProxyReply [GOOD] >> TFlatTest::LargeProxyReplyRW >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_list] >> test_s3_0.py::TestS3::test_csv_with_hopping[v1-client0] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure >> test_s3_0.py::TestS3::test_raw[v2-false-client0] |96.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_insert.py::TestS3::test_insert_csv_delimiter[v2-client0] [GOOD] >> test_insert.py::TestS3::test_append[v1-client0] >> TLocksTest::MultipleLocks [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-result_sets] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] Test command err: 2025-05-05T03:22:42.227797Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796867175344344:2143];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:42.227976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000b69/r3tmp/tmpRQdbwQ/pdisk_1.dat 2025-05-05T03:22:42.306747Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:22:42.328608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:42.328635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:42.329740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11938 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:42.345130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:42.348104Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:22:42.358267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:42.676501Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796868793086980:2210];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:42.676586Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000b69/r3tmp/tmp5EOVdM/pdisk_1.dat 2025-05-05T03:22:42.692813Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:64280 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:42.779148Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:42.779181Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:42.779471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:42.780188Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:42.783588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
|96.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-13.test] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_output_compression[v1-client0] [GOOD] >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v2-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.json-json_each_row] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client7-year Utf8-False] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-result_sets] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v2[v2-client0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.parquet-parquet] >> test_s3_1.py::TestS3::test_failed_precompute[v2-false-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client8-year Int32-False] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v1[v1-client0] >> test_compressions.py::TestS3Compressions::test_invalid_compression_inference[v2-client0] [GOOD] >> test_s3_1.py::TestS3::test_failed_precompute[v2-true-client0] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::MultipleLocks [GOOD] Test command err: 2025-05-05T03:22:40.435619Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796857578474075:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:40.435792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002b4/r3tmp/tmpwNNRRa/pdisk_1.dat 2025-05-05T03:22:40.498512Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25523 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:40.533460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:40.544858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:40.569353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:40.569391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:40.570492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:40.608886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:40.623329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-05-05T03:22:40.940504Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796859013883209:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:40.940526Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002b4/r3tmp/tmpzY2FtW/pdisk_1.dat 2025-05-05T03:22:40.960897Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17752 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:41.044057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:41.044089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:41.044419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:41.045080Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:41.050743Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:41.055120Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:22:41.056227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:41.071041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:41.085599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002b4/r3tmp/tmpv4wNnA/pdisk_1.dat 2025-05-05T03:22:41.456881Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500796862913203522:2218];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:41.464203Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:22:41.472498Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15817 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:22:41.559161Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:41.559187Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:41.559627Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:41.561026Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:41.562764Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:41.565838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:41.581411Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:41.598785Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002b4/r3tmp/tmpHSVWlj/pdisk_1.dat 2025-05-05T03:22:41.946566Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500796861508635495:2208];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:41.946665Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:22:41.971786Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9349 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:42.049461Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:42.049508Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:42.049827Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:42.050486Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:42.056550Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:42.071694Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:42.085975Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:42.445412Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7500796867758521358:2263];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:42.445462Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002b4/r3tmp/tmpRFFA8Z/pdisk_1.dat 2025-05-05T03:22:42.461109Z node 5 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:31868 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:42.549715Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:42.549759Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:42.550120Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:42.550716Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:42.558386Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:42.561660Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:42.576157Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:42.589462Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:22:42.942544Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7500796868209427237:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:42.942559Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002b4/r3tmp/tmpi4KA4J/pdisk_1.dat 2025-05-05T03:22:42.957110Z node 6 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28286 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:43.049691Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:43.049739Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:43.050092Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:43.050501Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T03:22:43.057823Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:43.073639Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:43.086623Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:43.465344Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500796869773270911:2206];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:43.467551Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002b4/r3tmp/tmpjb4ymD/pdisk_1.dat 2025-05-05T03:22:43.487237Z node 7 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9493 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:43.569764Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:43.569796Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:43.570052Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:43.571499Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:43.571682Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:43.575017Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:22:43.576257Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:43.600670Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:43.621581Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
|96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] >> TFlatTest::LargeProxyReplyRW [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> TLocksTest::SetLockFail >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-plan] >> test_s3_0.py::TestS3::test_raw[v2-false-client0] [GOOD] >> test_s3_0.py::TestS3::test_raw[v2-true-client0] >> test_ydb_scripting.py::TestExecuteScriptFromStdinWithWideOutput::test_wide_table >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v2-client0] [GOOD] >> TLocksTest::SetLockFail [GOOD] >> TLocksTest::SetEraseSet >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v1-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReplyRW [GOOD] Test command err: 2025-05-05T03:22:41.335878Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796864355130217:2132];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:41.336246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000338/r3tmp/tmpsV8cSh/pdisk_1.dat 2025-05-05T03:22:41.399092Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1655 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:41.468281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:41.468315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:41.469018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:41.469334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:41.471939Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:41.482569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 MiniKQLErrors: Query result size limit exceeded. (71692241 > 50331648) proxy error code: ExecResultUnavailable 2025-05-05T03:22:43.258673Z node 1 :TX_PROXY ERROR: Actor# [1:7500796868650100994:4122] txid# 281474976716010 MergeResult Result too large TDataReq marker# P18 2025-05-05T03:22:43.258712Z node 1 :TX_PROXY ERROR: Actor# [1:7500796868650100994:4122] txid# 281474976716010 RESPONSE Status# ExecResultUnavailable marker# P13c 2025-05-05T03:22:43.445384Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796873531167583:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:43.445408Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000338/r3tmp/tmpOSD5cM/pdisk_1.dat 2025-05-05T03:22:43.464751Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19506 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:43.549847Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:43.549881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:43.551021Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:43.551451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:43.558852Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:43.565254Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:22:43.570945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:45.377758Z node 2 :TX_PROXY ERROR: Actor# [2:7500796882121105734:4127] txid# 281474976716011 MergeResult Result too large TDataReq marker# P18 2025-05-05T03:22:45.377795Z node 2 :TX_PROXY ERROR: Actor# [2:7500796882121105734:4127] txid# 281474976716011 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. 
(71692241 > 50331648) proxy error code: ExecResultUnavailable >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client8-year Int32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client9-year Uint32-False] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client0-year Int32-False] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-plan] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client1-year Int32 NOT NULL-False] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.parquet-parquet] [GOOD] |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> TLocksTest::SetEraseSet [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-result_sets] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_list] [GOOD] >> test_insert.py::TestS3::test_append[v1-client0] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_each_row] >> test_insert.py::TestS3::test_append[v2-client0] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select_distinct.test] [GOOD] >> test_s3_1.py::TestS3::test_failed_precompute[v2-true-client0] [GOOD] >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v1-client0] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-strings.test] >> test_s3_1.py::TestS3::test_failed_precompute[v1-false-client0] >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v2-client0] >> test_s3_0.py::TestS3::test_raw[v2-true-client0] [GOOD] >> test_s3_0.py::TestS3::test_raw[v1-false-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::SetEraseSet [GOOD] Test command err: 2025-05-05T03:22:45.719389Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796880369441745:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:45.719616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000b64/r3tmp/tmpSe6XYN/pdisk_1.dat 2025-05-05T03:22:45.786901Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10491 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:22:45.858449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:45.858477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:45.859368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:45.861245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:45.866382Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:45.878466Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:22:45.880065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:22:45.927190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:45.939008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:22:45.959134Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715662: Validate (783): Key validation status: 3 2025-05-05T03:22:45.959198Z node 1 :TX_PROXY ERROR: Actor# [1:7500796880369442597:2492] txid# 281474976715662 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-05T03:22:45.959234Z node 1 :TX_PROXY ERROR: Actor# [1:7500796880369442597:2492] txid# 281474976715662 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-05T03:22:45.959244Z node 1 :TX_PROXY ERROR: Actor# [1:7500796880369442597:2492] txid# 281474976715662 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-05-05T03:22:45.960014Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715663: Validate (783): Key validation status: 3 2025-05-05T03:22:45.960131Z node 1 :TX_PROXY ERROR: Actor# [1:7500796880369442619:2499] txid# 281474976715663 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-05T03:22:45.960152Z node 1 :TX_PROXY ERROR: Actor# [1:7500796880369442619:2499] txid# 281474976715663 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-05T03:22:45.960160Z node 1 :TX_PROXY ERROR: Actor# [1:7500796880369442619:2499] txid# 281474976715663 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-05-05T03:22:45.960765Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715664: Validate (783): Key validation status: 3 2025-05-05T03:22:45.960857Z node 1 :TX_PROXY ERROR: Actor# [1:7500796880369442626:2503] txid# 281474976715664 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-05T03:22:45.960875Z node 1 :TX_PROXY ERROR: Actor# [1:7500796880369442626:2503] txid# 281474976715664 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-05T03:22:45.960878Z node 1 :TX_PROXY ERROR: Actor# [1:7500796880369442626:2503] txid# 281474976715664 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-05-05T03:22:45.961379Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715665: Validate (783): Key validation status: 3 2025-05-05T03:22:45.961441Z node 1 :TX_PROXY ERROR: Actor# [1:7500796880369442632:2506] txid# 281474976715665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-05T03:22:45.961456Z node 1 :TX_PROXY ERROR: Actor# [1:7500796880369442632:2506] txid# 281474976715665 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-05T03:22:45.961459Z node 1 :TX_PROXY ERROR: Actor# [1:7500796880369442632:2506] txid# 281474976715665 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-05-05T03:22:46.181319Z node 2 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796883536763543:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:46.181339Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000b64/r3tmp/tmpQJEOvt/pdisk_1.dat 2025-05-05T03:22:46.197902Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11893 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:22:46.286747Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:46.286785Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:46.287168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:46.287891Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T03:22:46.298436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:22:46.313258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:46.327004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:22:46.612559Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500796886097448221:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:46.612576Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000b64/r3tmp/tmpwVWNhM/pdisk_1.dat 2025-05-05T03:22:46.645782Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12758 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:46.724295Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:46.724330Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:46.724839Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:46.725547Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:46.734762Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:46.739755Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:46.761523Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T03:22:46.783451Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-result_sets] |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> TObjectStorageListingTest::MaxKeysAndSharding [GOOD] >> TObjectStorageListingTest::SchemaChecks ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_bindings_1.py::TestBindings::test_decimal_binding[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000783/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000783/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1390411) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1392865 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client9-year Uint32-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client1-year Int32 NOT NULL-False] [GOOD] >> TFlatTest::WriteMergeAndRead >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client10-year Int64 NOT NULL-True] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client2-year Uint32-False] >> TLocksTest::Range_IncorrectNullDot1 >> test_ydb_scripting.py::TestExecuteScriptFromStdinWithWideOutput::test_wide_table [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-plan] >> TObjectStorageListingTest::SchemaChecks [GOOD] >> TFlatTest::WriteMergeAndRead [GOOD] >> TFlatTest::WriteSplitAndRead >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] >> test_s3_0.py::TestS3::test_raw[v1-false-client0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SchemaChecks [GOOD] Test command err: 2025-05-05T03:22:34.251311Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796833541423403:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:34.251553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000343/r3tmp/tmpi3f85b/pdisk_1.dat 2025-05-05T03:22:34.318232Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27754, node 1 2025-05-05T03:22:34.335595Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T03:22:34.335607Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T03:22:34.335609Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T03:22:34.335673Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62372 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: 2025-05-05T03:22:34.353550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:34.353576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:34.354700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:34.392362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:34.395163Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:34.411940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:39.251687Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7500796833541423403:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:39.251721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000343/r3tmp/tmpwbdOcP/pdisk_1.dat 2025-05-05T03:22:48.336142Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:22:48.337267Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7669, node 2 2025-05-05T03:22:48.351495Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T03:22:48.351507Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T03:22:48.351509Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T03:22:48.351546Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4551 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:48.426564Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:48.426595Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:48.426996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:48.430655Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:48.433485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> test_s3_0.py::TestS3::test_raw[v1-true-client0] >> TFlatTest::WriteSplitAndRead [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-result_sets] [GOOD] >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v2-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-plan] >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client10-year Int64 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client11-year Int64-False] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-result_sets] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client2-year Uint32-False] [GOOD] >> test_inflight.py::TestS3::test_data_inflight[v1-client0-kikimr_params0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndRead [GOOD] Test command err: 2025-05-05T03:22:48.516800Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796893757800083:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:48.516906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000334/r3tmp/tmpZZGy59/pdisk_1.dat 2025-05-05T03:22:48.628620Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:62938 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:48.687649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:48.687671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:48.688498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:48.691031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:48.691132Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:22:48.709408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:48.780765Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T03:22:48.788586Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T03:22:48.809223Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T03:22:48.811350Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-05-05T03:22:48.886293Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:22:48.887104Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-05T03:22:48.887140Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-05T03:22:48.888246Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:22:48.889113Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-05T03:22:48.889124Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-05T03:22:48.889754Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:22:48.890222Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-05T03:22:48.890239Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TClient::Ls request: /dc-1/Dir/TableOld 2025-05-05T03:22:48.890782Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 8r (max 9), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2025-05-05T03:22:48.890888Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:22:48.891190Z node 1 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-05T03:22:48.891194Z node 1 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-05-05T03:22:48.892535Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.21, eph 3} end=0, 4 blobs 9r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (4073 2983 5183)b }, ecr=1.000 2025-05-05T03:22:48.893623Z node 1 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-05T03:22:48.893629Z node 1 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-05-05T03:22:48.893662Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-05T03:22:48.893679Z node 1 :TX_DATASHARD 
DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746415368810 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) 2025-05-05T03:22:48.898971Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:22:48.899609Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T03:22:48.899757Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:22:48.900026Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T03:22:48.900128Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:22:48.900204Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037888 restored its data 2025-05-05T03:22:48.900334Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T03:22:48.900375Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:22:48.900436Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037889 restored its data 2025-05-05T03:22:48.900556Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T03:22:48.900615Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:22:48.900672Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037888 restored its data 2025-05-05T03:22:48.900793Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T03:22:48.900821Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:22:48.900868Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037889 restored its data 2025-05-05T03:22:48.900972Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T03:22:48.900992Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:22:48.901037Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037888 restored its data 2025-05-05T03:22:48.901155Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T03:22:48.901173Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:22:48.901214Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037889 restored its data 2025-05-05T03:22:48.901315Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T03:22:48.901337Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:22:48.901386Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037888 restored its data 2025-05-05T03:22:48.901495Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 
2025-05-05T03:22:48.901521Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:22:48.901569Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037889 restored its data 2025-05-05T03:22:48.901685Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T03:22:48.901799Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:22:48.901939Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037888 restored its data 2025-05-05T03:22:48.902139Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T03:22:48.902169Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:22:48.902246Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037889 restored its data 2025-05-05T03:22:48.902388Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T03:22:48.902430Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:22:48.902484Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037888 restored its data 2025-05-05T03:22:48.902612Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T03:22:48.902639Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:22:48.902688Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037889 restored its data 2025-05-05T03:22:48.902821Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T03:22:48.902847Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T03:22:48.902895Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037888 restored its data 2025-05-05T03:22:48.903013Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T03:22:48.903042Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T03:22:48.903096Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 7207518622403788 ... 
224037891 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T03:22:49.397345Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7500796897121643814:2401], serverId# [2:7500796897121643827:2687], sessionId# [0:0:0] 2025-05-05T03:22:49.397388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796897121643252 RawX2: 4503608217307387 } TabletId: 72075186224037888 State: 4 2025-05-05T03:22:49.397406Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:22:49.397506Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 TClient::Ls request: /dc-1/Dir/TableOld 2025-05-05T03:22:49.397670Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-05-05T03:22:49.397702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796897121643611 RawX2: 4503608217307459 } TabletId: 72075186224037891 State: 4 2025-05-05T03:22:49.397712Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:22:49.397788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:22:49.397813Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-05-05T03:22:49.398073Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T03:22:49.398089Z node 2 :TX_DATASHARD INFO: 72075186224037892 Initiating switch from PreOffline to Offline state 2025-05-05T03:22:49.398330Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T03:22:49.398342Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-05-05T03:22:49.398543Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T03:22:49.398545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-05T03:22:49.398557Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-05-05T03:22:49.398628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-05-05T03:22:49.398697Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 
72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-05T03:22:49.398727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-05T03:22:49.398737Z node 2 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T03:22:49.398757Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-05-05T03:22:49.398761Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-05-05T03:22:49.398766Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7500796897121643731:2613], serverId# [2:7500796897121643734:2616], sessionId# [0:0:0] 2025-05-05T03:22:49.398934Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-05-05T03:22:49.398972Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-05-05T03:22:49.398995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-05-05T03:22:49.399007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-05T03:22:49.399016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-05T03:22:49.399020Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-05-05T03:22:49.399023Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-05T03:22:49.399060Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-05T03:22:49.399302Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T03:22:49.399409Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-05-05T03:22:49.399427Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-05-05T03:22:49.399656Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T03:22:49.399709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796897121643596 RawX2: 4503608217307457 } TabletId: 72075186224037892 State: 4 2025-05-05T03:22:49.399717Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:22:49.399777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796897121643251 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 2025-05-05T03:22:49.399780Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:22:49.399834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:22:49.399854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:22:49.400050Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 
72075186224037892 state Offline 2025-05-05T03:22:49.400063Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-05-05T03:22:49.400084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796897121643598 RawX2: 4503608217307458 } TabletId: 72075186224037890 State: 4 2025-05-05T03:22:49.400092Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:22:49.400171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:22:49.400192Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-05T03:22:49.400910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-05-05T03:22:49.400988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T03:22:49.401035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-05T03:22:49.401061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T03:22:49.401085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T03:22:49.401108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T03:22:49.401129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T03:22:49.401138Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T03:22:49.401148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T03:22:49.401198Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-05-05T03:22:49.401211Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-05T03:22:49.401230Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-05T03:22:49.401290Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-05-05T03:22:49.401313Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-05-05T03:22:49.401631Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-05-05T03:22:49.401645Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-05-05T03:22:49.401788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-05-05T03:22:49.401799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 
2025-05-05T03:22:49.401807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T03:22:49.401809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-05T03:22:49.401888Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-05-05T03:22:49.401890Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-05T03:22:49.401893Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-05T03:22:49.401912Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T03:22:49.401935Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-05-05T03:22:49.402061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T03:22:49.402064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T03:22:49.402073Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client3-year Uint32 NOT NULL-True] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params1] |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> TLocksTest::Range_IncorrectNullDot1 [GOOD] >> TLocksTest::Range_IncorrectNullDot2 >> test_insert.py::TestS3::test_append[v2-client0] [GOOD] |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_each_row] [GOOD] >> test_insert.py::TestS3::test_part_split[v1-client0] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-result_sets] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-csv_with_names] >> test_s3_1.py::TestS3::test_failed_precompute[v1-false-client0] [GOOD] >> test_s3_1.py::TestS3::test_failed_precompute[v1-true-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.json-json_each_row] |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead 
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_postgres.py::TestPGSQL::test_sql_suite[results-strings.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-window.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-6.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-7.test] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-plan] |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_s3_0.py::TestS3::test_raw[v1-true-client0] [GOOD] >> test_s3_0.py::TestS3::test_limit[v2-false-kikimr_params0-client0] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client3-year Uint32 NOT NULL-True] [GOOD] >> test_formats.py::TestS3Formats::test_no_nullable_column[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client4-year Int64-False] |96.5%| [TA] $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} >> TFlatTest::SelectRangeReverseItemsLimit |96.5%| [TA] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client11-year Int64-False] [GOOD] >> TExportToS3Tests::DropSourceTableBeforeTransferring >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client12-year Uint64-False] >> TFlatTest::SelectRangeReverseItemsLimit [GOOD] >> TFlatTest::SelectRangeReverseIncludeKeys >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-plan] >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.json-json_each_row] [GOOD] >> TExportToS3Tests::RebootDuringCompletion >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.parquet-parquet] |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_insert.py::TestS3::test_part_split[v1-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-false] [GOOD] >> test_insert.py::TestS3::test_part_split[v2-client0] >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-true] >> TLocksTest::Range_IncorrectNullDot2 [GOOD] >> test_ydb_backup.py::TestRecursiveSchemeOnly::test_recursive_table_backup_from_different_places >> TExportToS3Tests::EnableChecksumsPersistance ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] Test command err: 2025-05-05T03:22:52.249150Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796908962477679:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:52.250081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000b5e/r3tmp/tmpqVu1AQ/pdisk_1.dat 2025-05-05T03:22:52.349549Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17719 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:52.426297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:52.426336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:52.427040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:52.427346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T03:22:52.440036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:52.698781Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796908308804785:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:52.698814Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000b5e/r3tmp/tmpxgpxDP/pdisk_1.dat 2025-05-05T03:22:52.712372Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19237 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:22:52.802545Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:52.802580Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:52.802891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:52.803794Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:52.808255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings1-client0] [GOOD] >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings1-client0] >> TExportToS3Tests::RebootDuringCompletion [GOOD] >> TExportToS3Tests::SchemaMapping >> test_formats.py::TestS3Formats::test_no_nullable_column[v2-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-plan] >> TExportToS3Tests::EnableChecksumsPersistance [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-result_sets] |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_s3_1.py::TestS3::test_failed_precompute[v1-true-client0] [GOOD] >> test_s3_1.py::TestS3::test_missed[v2-false-client0] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-csv_with_names] [GOOD] >> test_formats.py::TestS3Formats::test_no_nullable_column[v1-client0] >> TExportToS3Tests::EncryptedExport >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-parquet] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectNullDot2 [GOOD] Test command err: 2025-05-05T03:22:48.614532Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796894044388481:2206];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:48.616349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000b61/r3tmp/tmpqoXUaY/pdisk_1.dat 2025-05-05T03:22:48.728440Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10010 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:48.797186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:48.797221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:48.797894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:48.799399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:48.802595Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:48.811757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:48.833475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:48.844321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000b61/r3tmp/tmp4LY3iN/pdisk_1.dat 2025-05-05T03:22:49.085806Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:22:49.086038Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18176 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:49.177971Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:49.178012Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:49.178476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:49.179965Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:49.185527Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:49.190075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:49.249356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:49.263251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000b61/r3tmp/tmp6htJ8W/pdisk_1.dat 2025-05-05T03:22:49.585163Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500796896497490102:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:49.585176Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:22:49.601589Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:62761 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:49.691206Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:49.691239Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:49.691665Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:49.694641Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:49.698453Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:49.707546Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:49.728745Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:49.738751Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:22:50.093964Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500796901460482039:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:50.093985Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000b61/r3tmp/tmpyAAdjQ/pdisk_1.dat 2025-05-05T03:22:50.111255Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10771 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:50.198262Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:50.198295Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:50.198817Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:50.199275Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: C ... xId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:51.685099Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:51.685149Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:51.685627Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:51.686145Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:51.686916Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:51.695586Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T03:22:51.712217Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T03:22:51.725318Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:52.076825Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500796911653666193:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:52.076846Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000b61/r3tmp/tmppDazQA/pdisk_1.dat 2025-05-05T03:22:52.094463Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26007 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:52.181251Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:52.181287Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:52.181929Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:52.182725Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:52.183202Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:52.192793Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:52.220714Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:22:52.237765Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000b61/r3tmp/tmpYhs3e6/pdisk_1.dat 2025-05-05T03:22:52.607874Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:22:52.615824Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3674 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:52.701405Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:52.701439Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:52.701864Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:52.702533Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:52.710591Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:52.717625Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:52.739365Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:52.749860Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:22:53.090983Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500796915838005848:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:53.091005Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000b61/r3tmp/tmpPMHKgv/pdisk_1.dat 2025-05-05T03:22:53.111194Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20420 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:53.193906Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:53.193947Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:53.194327Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:53.194889Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:53.198868Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:53.209827Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:53.222671Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T03:22:53.235880Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client12-year Uint64-False] [GOOD] >> TExportToS3Tests::SchemaMapping [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client4-year Int64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client13-year Date-False] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client5-year Int64 NOT NULL-False] >> TExportToS3Tests::SchemaMappingEncryption |96.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[scan] [GOOD] >> TExportToS3Tests::EncryptedExport [GOOD] >> TExportToS3Tests::SchemaMappingEncryption [GOOD] >> TLocksFatTest::PointSetNotBreak >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-result_sets] >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-true] [GOOD] >> test_formats.py::TestS3Formats::test_no_nullable_column[v1-client0] [GOOD] >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-plan] >> TLocksTest::Range_CorrectNullDot >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v2-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::EncryptedExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:22:52.739997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:22:52.740022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:22:52.740027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:22:52.740032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:22:52.740042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:22:52.740045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:22:52.740054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:22:52.740067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:22:52.740164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:22:52.740249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:22:52.752209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:22:52.752231Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:22:52.755818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:22:52.756087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:22:52.756143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:22:52.757500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:22:52.757561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:22:52.757663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:22:52.757950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:22:52.758848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:22:52.759136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:22:52.759148Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:22:52.759167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:22:52.759173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:22:52.759179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:22:52.759212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:22:52.760515Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:22:52.778519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:22:52.778602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:52.778696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:22:52.778753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:22:52.778765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:52.779778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:22:52.779814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:22:52.779872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:52.779885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:22:52.779890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:22:52.779896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:22:52.780769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:52.780785Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:22:52.780792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:22:52.781234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:52.781246Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:52.781266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:22:52.781273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:22:52.781953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:22:52.782535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:22:52.782586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:22:52.782791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:22:52.782824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-05-05T03:22:52.782832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:22:52.782913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:22:52.782922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:22:52.782958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:22:52.782971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:22:52.783568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:22:52.783579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:22:52.783642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:22:52.783649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:22:52.783724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:52.783731Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:22:52.783743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:22:52.783748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:22:52.783753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:22:52.783756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:22:52.783760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:22:52.783766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:22:52.783770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:22:52.783774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:22:52.783787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:22:52.783793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:22:52.783797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:22:52.784144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:22:52.784162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
4046678944 2025-05-05T03:22:54.865511Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-05-05T03:22:54.865519Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-05-05T03:22:54.865542Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:22:54.865612Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:22:54.865622Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:22:54.865628Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T03:22:54.865632Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-05-05T03:22:54.865637Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:22:54.865815Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:22:54.865826Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:22:54.865829Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T03:22:54.865834Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-05-05T03:22:54.865841Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-05T03:22:54.865852Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-05T03:22:54.866754Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:22:54.866791Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-05-05T03:22:54.866796Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-05T03:22:54.866802Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-05-05T03:22:54.867010Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-05-05T03:22:54.867056Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000010 2025-05-05T03:22:54.867148Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:22:54.867168Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:22:54.867175Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000010, at schemeshard: 72057594046678944 2025-05-05T03:22:54.867197Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-05-05T03:22:54.867206Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-05T03:22:54.867210Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T03:22:54.867215Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-05T03:22:54.867232Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T03:22:54.867240Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:22:54.867248Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-05T03:22:54.867254Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-05-05T03:22:54.867260Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T03:22:54.867264Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-05-05T03:22:54.867268Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-05-05T03:22:54.867276Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-05T03:22:54.867281Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-05-05T03:22:54.867288Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-05-05T03:22:54.867292Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T03:22:54.867411Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T03:22:54.867428Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710763 2025-05-05T03:22:54.867851Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:22:54.867865Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:22:54.867897Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T03:22:54.867919Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:22:54.867924Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-05-05T03:22:54.867949Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-05-05T03:22:54.868097Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:22:54.868110Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:22:54.868114Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T03:22:54.868120Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-05T03:22:54.868124Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:22:54.868227Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:22:54.868239Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T03:22:54.868243Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T03:22:54.868247Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T03:22:54.868251Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-05T03:22:54.868260Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-05-05T03:22:54.868268Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:124:2150] 2025-05-05T03:22:54.869056Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T03:22:54.869116Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T03:22:54.869129Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-05-05T03:22:54.869139Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-05-05T03:22:54.869147Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:22:54.869151Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-05-05T03:22:54.869156Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-05-05T03:22:54.869546Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:22:54.869565Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-05T03:22:54.869572Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:1120:2999] TestWaitNotification: OK eventTxId 103 >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-false] >> TFlatTest::SplitEmptyToMany >> test_insert.py::TestS3::test_part_split[v2-client0] [GOOD] >> test_insert.py::TestS3::test_part_merge[v1-client0] |96.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client13-year Date-False] [GOOD] |96.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client5-year Int64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client0-year Int32 NOT NULL-True] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] >> TExportToS3Tests::RebootDuringAbortion >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client6-year Uint64-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:22:53.361422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:22:53.361447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:22:53.361453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:22:53.361458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:22:53.361467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:22:53.361471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:22:53.361479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:22:53.361490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:22:53.361571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:22:53.361638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:22:53.386160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:22:53.386179Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:22:53.390019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:22:53.390374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:22:53.390428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:22:53.391914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:22:53.391968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:22:53.392064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:22:53.392329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:22:53.393149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:22:53.393417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:22:53.393430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:22:53.393449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:22:53.393456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:22:53.393462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:22:53.393494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:22:53.395062Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:22:53.411849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: 
"pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:22:53.411921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:53.411990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:22:53.412042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:22:53.412055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:53.412814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:22:53.412841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:22:53.412891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:53.412903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:22:53.412908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:22:53.412912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:22:53.413351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:53.413363Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:22:53.413369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:22:53.413766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:53.413776Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:53.413781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:22:53.413788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:22:53.414406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:22:53.414781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:22:53.414817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 
State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:22:53.414991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:22:53.415013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:22:53.415022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:22:53.415078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:22:53.415090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:22:53.415119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:22:53.415131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:22:53.415556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:22:53.415564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:22:53.415601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:22:53.415607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:22:53.415672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:53.415680Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:22:53.415691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:22:53.415695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:22:53.415700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:22:53.415703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:22:53.415707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:22:53.415713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:22:53.415717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:22:53.415720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:22:53.415731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:22:53.415737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:22:53.415740Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:22:53.416038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:22:53.416058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 20Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T03:22:55.540425Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-05-05T03:22:55.540429Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:22:55.540567Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:22:55.540576Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:22:55.540580Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T03:22:55.540584Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-05-05T03:22:55.540587Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:22:55.540596Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T03:22:55.540985Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:22:55.541041Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-05T03:22:55.541046Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T03:22:55.541051Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-05T03:22:55.541193Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-05-05T03:22:55.541219Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-05-05T03:22:55.541293Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 
72057594046678944 2025-05-05T03:22:55.541325Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:22:55.541333Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944 2025-05-05T03:22:55.541353Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-05-05T03:22:55.541365Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T03:22:55.541369Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T03:22:55.541373Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T03:22:55.541376Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T03:22:55.541390Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:22:55.541399Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:22:55.541404Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-05-05T03:22:55.541410Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T03:22:55.541414Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-05-05T03:22:55.541418Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-05-05T03:22:55.541425Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:22:55.541430Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-05-05T03:22:55.541435Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-05T03:22:55.541438Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T03:22:55.541573Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T03:22:55.541593Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T03:22:55.541932Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:22:55.541942Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:22:55.541965Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T03:22:55.541983Z node 4 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:22:55.541987Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-05-05T03:22:55.542003Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-05-05T03:22:55.542127Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:22:55.542137Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:22:55.542142Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T03:22:55.542149Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-05T03:22:55.542153Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T03:22:55.542270Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:22:55.542281Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T03:22:55.542284Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T03:22:55.542288Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T03:22:55.542292Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T03:22:55.542301Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-05-05T03:22:55.542305Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:124:2150] 2025-05-05T03:22:55.542355Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T03:22:55.542360Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T03:22:55.542369Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:22:55.542774Z 
node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T03:22:55.543122Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T03:22:55.543141Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-05-05T03:22:55.543149Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-05-05T03:22:55.543154Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:22:55.543156Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-05-05T03:22:55.543159Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 103, itemIdx# 4294967295 2025-05-05T03:22:55.543197Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T03:22:55.543425Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 103 2025-05-05T03:22:55.543467Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-05T03:22:55.543471Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-05T03:22:55.543513Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-05T03:22:55.543524Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-05T03:22:55.543530Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:547:2506] TestWaitNotification: OK eventTxId 103 >> test_s3_1.py::TestS3::test_missed[v2-false-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-result_sets] [GOOD] >> test_s3_1.py::TestS3::test_missed[v2-true-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-plan] >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-plan] [GOOD] >> test_ydb_backup.py::TestRecursiveSchemeOnly::test_recursive_table_backup_from_different_places [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-result_sets] >> TExportToS3Tests::ExportStartTime >> TLocksFatTest::PointSetNotBreak [GOOD] >> TLocksFatTest::PointSetRemove >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-parquet] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-false] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_list] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-true] >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v2-client0] [GOOD] >> TExportToS3Tests::ExportStartTime [GOOD] >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client0-year Int32 NOT NULL-True] [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown >> TExportToS3Tests::ExportPartitioningSettings >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client1-year Uint32 NOT NULL-True] >> 
TLocksTest::Range_CorrectNullDot [GOOD] >> TLocksTest::Range_EmptyKey >> TExportToS3Tests::DropCopiesBeforeTransferring1 |96.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> TExportToS3Tests::ExportPartitioningSettings [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-result_sets] >> test_postgres.py::TestPGSQL::test_sql_suite[results-window.test] [GOOD] >> TExportToS3Tests::ExportIndexTablePartitioningSettings >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TExportToS3Tests::CorruptedDyNumber >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client6-year Uint64-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.json-json_each_row] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-plan] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client7-year Uint64 NOT NULL-False] >> test_s3_1.py::TestS3::test_missed[v2-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client1-year Uint32 NOT NULL-True] [GOOD] >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> test_s3_1.py::TestS3::test_missed[v1-false-client0] |96.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client2-year Uint64 NOT NULL-True] >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> TExportToS3Tests::CompletedExportEndTime |96.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_insert.py::TestS3::test_part_merge[v1-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-true] [GOOD] >> test_insert.py::TestS3::test_part_merge[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-false] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_compressions.py::TestS3Compressions::test_invalid_compression_inference[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00077b/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_compressions/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00077b/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_compressions/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback 
contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1391701) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1394415 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:22:56.566248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:22:56.566274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:22:56.566279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:22:56.566284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:22:56.566294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:22:56.566298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:22:56.566306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:22:56.566318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:22:56.566402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:22:56.566468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:22:56.578596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:22:56.578620Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:22:56.587326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:22:56.587866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:22:56.587936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:22:56.589392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:22:56.589447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:22:56.589524Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:22:56.589810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:22:56.590937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:22:56.591205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:22:56.591219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:22:56.591239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:22:56.591246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:22:56.591251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:22:56.591283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:22:56.592683Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:22:56.608589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:22:56.608646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:56.608704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:22:56.608749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:22:56.608761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:56.609417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:22:56.609443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:22:56.609502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:56.609512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:22:56.609516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:22:56.609521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:22:56.609898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:56.609907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:22:56.609911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:22:56.610266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:56.610275Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:56.610281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:22:56.610288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:22:56.610868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:22:56.611224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:22:56.611256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:22:56.611415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:22:56.611437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:22:56.611446Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:22:56.611506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:22:56.611518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:22:56.611547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:22:56.611559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:22:56.612031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:22:56.612040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:22:56.612077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-05-05T03:22:56.612081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:22:56.612138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:56.612145Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:22:56.612156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:22:56.612160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:22:56.612164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:22:56.612167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:22:56.612171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:22:56.612176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:22:56.612181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:22:56.612185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:22:56.612195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:22:56.612200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:22:56.612204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:22:56.612432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:22:56.612442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
hardId: 72075186233409548 CpuTimeUsec: 67 } } 2025-05-05T03:22:58.752191Z node 4 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-05-05T03:22:58.752224Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2025-05-05T03:22:58.752230Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2025-05-05T03:22:58.752794Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:22:58.752837Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:22:58.752846Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:22:58.752858Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2025-05-05T03:22:58.752894Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:22:58.753292Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-05-05T03:22:58.753326Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 281474976710759 at step: 5000005 2025-05-05T03:22:58.753411Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:22:58.753436Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:22:58.753443Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-05-05T03:22:58.753471Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-05-05T03:22:58.753497Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:2795 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AC1FD018-5F1C-4ABD-99BD-13D7590B4A44 amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== 
content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-05-05T03:22:58.766890Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:22:58.766912Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-05-05T03:22:58.767025Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:22:58.767034Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 6 2025-05-05T03:22:58.767188Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:22:58.767201Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2025-05-05T03:22:58.767450Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-05T03:22:58.767466Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-05T03:22:58.767471Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-05-05T03:22:58.767496Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-05-05T03:22:58.767503Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-05T03:22:58.767528Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true 2025-05-05T03:22:58.768811Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:2795 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CAA85212-55DC-4930-996D-EA9BD77A119E amz-sdk-request: attempt=1 content-length: 602 content-md5: GgrERoUcI3sF1n0Je2MTCQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 602 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:2795 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4149F3E6-138D-4BB7-8D47-82A86494F428 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2025-05-05T03:22:58.773257Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 512 RawX2: 17179871654 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T03:22:58.773277Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409548, partId: 0 2025-05-05T03:22:58.773303Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 512 RawX2: 17179871654 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T03:22:58.773318Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 512 RawX2: 17179871654 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T03:22:58.773335Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T03:22:58.773340Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:22:58.773345Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-05-05T03:22:58.773352Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-05-05T03:22:58.773398Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:22:58.773978Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:22:58.774054Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:22:58.774066Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-05-05T03:22:58.774080Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T03:22:58.774085Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T03:22:58.774090Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T03:22:58.774094Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T03:22:58.774101Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-05-05T03:22:58.774118Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:124:2150] message: TxId: 281474976710759 2025-05-05T03:22:58.774124Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T03:22:58.774129Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-05-05T03:22:58.774134Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-05-05T03:22:58.774169Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-05T03:22:58.774865Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-05-05T03:22:58.774889Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-05-05T03:22:58.775485Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:22:58.775500Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:538:2488] TestWaitNotification: OK eventTxId 102 >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-plan] >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> TLocksFatTest::PointSetRemove [GOOD] >> TExportToS3Tests::DisableAutoDropping >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-result_sets] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.json-json_each_row] [GOOD] >> TExportToS3Tests::DisableAutoDropping [GOOD] >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v1-client0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.parquet-parquet] >> TFlatTest::LargeDatashardReplyDistributed >> test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v2-client0] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client7-year Uint64 NOT NULL-False] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetRemove [GOOD] Test command err: 2025-05-05T03:22:55.233476Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796923512200921:2136];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:55.234301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002a3/r3tmp/tmpEbXXZa/pdisk_1.dat 2025-05-05T03:22:55.292970Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26302 WaitRootIsUp 'dc-1'... 
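Note on the content-md5 values in the PUT requests of the export trace above: Content-MD5 is the base64 encoding of the raw 128-bit MD5 digest of the request body, and the empty data_00.csv upload (content-length: 0) is a case that can be checked directly against the logged header. A minimal Python sketch, illustrative only and not part of the captured test output (the helper name is arbitrary):

    import base64
    import hashlib

    def content_md5(body: bytes) -> str:
        # S3-style Content-MD5: base64 of the raw (binary) MD5 digest of the body.
        return base64.b64encode(hashlib.md5(body).digest()).decode("ascii")

    # data_00.csv was uploaded with content-length: 0; the header recorded in the
    # trace for that request is 1B2M2Y8AsgTpgAmY7PhCfg==, the digest of an empty body.
    assert content_md5(b"") == "1B2M2Y8AsgTpgAmY7PhCfg=="

The same helper could be used to spot-check the non-empty uploads seen here (metadata.json, 73 bytes; scheme.pb, 602 bytes) if their payloads were captured alongside the headers.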
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:55.367631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:55.367673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:55.368362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:55.368697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T03:22:55.376674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:55.398434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:55.412481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:57.310997Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796929824699092:2200];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002a3/r3tmp/tmpF3O29C/pdisk_1.dat 2025-05-05T03:22:57.313144Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:22:57.328635Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26469 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:22:57.414277Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:57.414312Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:57.414622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:57.415155Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:57.418380Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:22:57.428548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:57.444093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:57.460841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/0002a3/r3tmp/tmp1SBplO/pdisk_1.dat 2025-05-05T03:22:58.520929Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:22:58.521693Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21958 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:58.607608Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:58.607643Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:58.608149Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:58.608771Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:58.617863Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:58.626454Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:22:58.627644Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:58.643948Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T03:22:58.667141Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client8-year String NOT NULL-True] >> TLocksTest::Range_EmptyKey [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client2-year Uint64 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client3-year Date NOT NULL-False] >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_list] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_each_row] >> test_s3_1.py::TestS3::test_missed[v1-false-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-result_sets] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.parquet-parquet] [GOOD] >> test_s3_1.py::TestS3::test_missed[v1-true-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-false] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::DisableAutoDropping [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:22:58.184735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:22:58.184762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:22:58.184767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:22:58.184772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:22:58.184782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:22:58.184786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:22:58.184794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:22:58.184807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:22:58.184907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2025-05-05T03:22:58.184986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:22:58.197342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:22:58.197365Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:22:58.201130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:22:58.201522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:22:58.201591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:22:58.203322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:22:58.203397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:22:58.203512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:22:58.203848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:22:58.205011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:22:58.205324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:22:58.205338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:22:58.205365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:22:58.205374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:22:58.205380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:22:58.205417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:22:58.207331Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:22:58.226870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:22:58.226946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:58.227035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:22:58.227085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:22:58.227095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:58.228011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-05-05T03:22:58.228043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:22:58.228097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:58.228108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:22:58.228113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:22:58.228118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:22:58.228639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:58.228654Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:22:58.228659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:22:58.229140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:58.229154Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:58.229160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:22:58.229167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:22:58.229805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:22:58.230333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:22:58.230380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:22:58.230573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:22:58.230599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:22:58.230606Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:22:58.230670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:22:58.230686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:22:58.230717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:22:58.230728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:22:58.231376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:22:58.231385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:22:58.231443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:22:58.231449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:22:58.231515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:22:58.231523Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:22:58.231535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:22:58.231540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:22:58.231544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:22:58.231547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:22:58.231553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:22:58.231558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:22:58.231563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:22:58.231566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:22:58.231589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:22:58.231597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:22:58.231600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:22:58.231909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:22:58.231927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
57594046678944 2025-05-05T03:23:00.260695Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T03:23:00.260703Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:23:00.260729Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:23:00.260896Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:00.260909Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:00.260913Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:23:00.260917Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-05-05T03:23:00.260923Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:23:00.261278Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:00.261293Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:00.261320Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:23:00.261325Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-05-05T03:23:00.261330Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-05T03:23:00.261342Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:23:00.261766Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:23:00.261840Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:23:00.261848Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:23:00.261855Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:23:00.261883Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T03:23:00.261910Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2025-05-05T03:23:00.262025Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:00.262049Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:23:00.262057Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000007, at schemeshard: 72057594046678944 2025-05-05T03:23:00.262085Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T03:23:00.262095Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:23:00.262100Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:23:00.262105Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:23:00.262109Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:23:00.262117Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:23:00.262126Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-05T03:23:00.262133Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T03:23:00.262139Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:23:00.262144Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T03:23:00.262148Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T03:23:00.262156Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-05T03:23:00.262162Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T03:23:00.262167Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-05T03:23:00.262171Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-05-05T03:23:00.262752Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:00.262781Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710761 2025-05-05T03:23:00.264466Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:00.264483Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:00.264528Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-05T03:23:00.264556Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:00.264563Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T03:23:00.264568Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T03:23:00.264749Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:00.264764Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:00.264769Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:23:00.264774Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-05T03:23:00.264779Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:23:00.264937Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:00.264947Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:00.264952Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:23:00.264956Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-05T03:23:00.264960Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-05T03:23:00.264971Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T03:23:00.264977Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:124:2150] 2025-05-05T03:23:00.265782Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:00.265847Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:00.265885Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T03:23:00.265898Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T03:23:00.265907Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:23:00.265912Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T03:23:00.265918Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-05-05T03:23:00.266474Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:23:00.266499Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:23:00.266506Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:614:2571] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_EmptyKey [GOOD] Test command err: 2025-05-05T03:22:55.570331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796921062145119:2200];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:55.571101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00032d/r3tmp/tmpROCzn3/pdisk_1.dat 2025-05-05T03:22:55.643281Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:62917 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:22:55.714014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:55.716022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:55.716045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-05T03:22:55.718241Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:22:55.718527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:55.727582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:55.749418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:55.759505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:56.034466Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796925676759749:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:56.034539Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00032d/r3tmp/tmpNgV2I8/pdisk_1.dat 2025-05-05T03:22:56.060073Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:65196 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:22:56.136492Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:56.136523Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:56.136868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:56.137486Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:56.147989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:56.166601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:56.178281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:22:56.509061Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500796929393233791:2206];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:56.509101Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00032d/r3tmp/tmpVTq6Kv/pdisk_1.dat 2025-05-05T03:22:56.521443Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2083 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:22:56.612280Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:56.612306Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:56.612724Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:56.614532Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:56.615952Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:56.622754Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:22:56.627007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:56.646948Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T03:22:56.659726Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:56.994170Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500796928634624591:2137];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:56.996326Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00032d/r3tmp/tmpKvN8BX/pdisk_1.dat 2025-05-05T03:22:57.013774Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5361 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:22:57.096446Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:57.096478Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:57.096873Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboper ... 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:22:58.618765Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:58.618793Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:58.619141Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:58.619834Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:58.622835Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:58.632166Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:58.659614Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:58.683081Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:59.249951Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500796941492374600:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:59.249971Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00032d/r3tmp/tmpu00qf0/pdisk_1.dat 2025-05-05T03:22:59.268196Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8806 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:59.354424Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:59.354465Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:59.354781Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:59.355624Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:22:59.360217Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:59.375388Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:59.389562Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:59.667155Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500796940284545168:2263];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:59.667235Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00032d/r3tmp/tmpefPpOr/pdisk_1.dat 2025-05-05T03:22:59.686744Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16376 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:22:59.771668Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected waiting... 2025-05-05T03:22:59.771701Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:59.772162Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:22:59.772801Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:22:59.774685Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:59.780882Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:59.803043Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:22:59.820378Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00032d/r3tmp/tmpgp0TGN/pdisk_1.dat 2025-05-05T03:23:00.168605Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500796944681864727:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:23:00.180078Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:23:00.187860Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5909 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... waiting... 2025-05-05T03:23:00.274523Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:00.274564Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:23:00.275032Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:23:00.275548Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:23:00.276479Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:23:00.284947Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:00.300376Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:00.313697Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-true] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-7.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-8.test] |96.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_scripting.py::TestExecuteScriptFromStdinWithWideOutput::test_wide_table [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-plan] |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client8-year String NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client9-year String-False] >> TExportToS3Tests::ShouldSucceedOnSingleShardTable >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-window.test] [GOOD] |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-plan] >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v2-client0] [GOOD] >> TExportToS3Tests::UidAsIdempotencyKey >> test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v1-client0] >> test_insert.py::TestS3::test_part_merge[v2-client0] [GOOD] >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> test_insert.py::TestS3::test_part_binding[v1-client0-json_list] [SKIPPED] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_s3_1.py::TestS3::test_missed[v1-true-client0] [GOOD] >> TExportToS3Tests::UserSID >> test_insert.py::TestS3::test_part_binding[v1-client0-json_each_row] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] >> TFlatTest::WriteSplitKillRead >> test_s3_1.py::TestS3::test_simple_hits_47[v2-false-client0] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-plan] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-true] [GOOD] >> TExportToS3Tests::UserSID [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client3-year Date NOT NULL-False] [GOOD] >> TLocksTest::BrokenSameKeyLock >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-false] >> TExportToS3Tests::TablePermissions >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client4-year String NOT NULL-True] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] >> TFlatTest::WriteSplitKillRead [GOOD] >> TFlatTest::WriteSplitWriteSplit |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> 
test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] >> TExportToS3Tests::TablePermissions [GOOD] |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-result_sets] |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> TFlatTest::WriteSplitWriteSplit [GOOD] >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed >> TFlatTest::LargeDatashardReplyDistributed [GOOD] >> TFlatTest::LargeDatashardReplyRW >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_each_row] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client9-year String-False] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-csv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client10-year Utf8-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TablePermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:23:02.351285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:23:02.351314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:23:02.351319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:23:02.351324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:23:02.351334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:23:02.351339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:23:02.351349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:23:02.351362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:23:02.351471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:23:02.351562Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TxInitSchema.Execute 2025-05-05T03:23:02.364931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:23:02.364962Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:23:02.369398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:23:02.369933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:23:02.370047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:23:02.372911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:23:02.372986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:23:02.373081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:02.373418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:23:02.379555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:02.379952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:02.379965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:02.379995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:23:02.380004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:02.380010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:23:02.380062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:23:02.381727Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:23:02.401464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:23:02.401542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:02.401631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:23:02.401681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:23:02.401692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:02.402683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:02.402716Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:23:02.402783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:02.402795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:23:02.402801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:23:02.402807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:23:02.405229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:02.405248Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:23:02.405256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:23:02.405803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:02.405830Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:02.405836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:02.405843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:23:02.406535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:23:02.407059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:23:02.407108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:23:02.407314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:02.407344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:23:02.407352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:02.407420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:23:02.407429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:02.407466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:23:02.407478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:23:02.407913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:02.407922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:02.407969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:02.407975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:23:02.408049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:02.408057Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:23:02.408069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:23:02.408074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:02.408079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:23:02.408082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:02.408087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:23:02.408093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:02.408098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:23:02.408102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:23:02.408113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:23:02.408120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:23:02.408125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:23:02.408478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:23:02.408506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
X_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2025-05-05T03:23:03.751438Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:03.751494Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:03.751502Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:23:03.751515Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2025-05-05T03:23:03.751554Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:23:03.752207Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-05-05T03:23:03.752259Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-05-05T03:23:03.752371Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:03.752406Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:23:03.752414Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-05-05T03:23:03.752450Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-05-05T03:23:03.752487Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:7524 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 65D0EA6A-4A7E-4B72-AB1D-4038AA03FF31 amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-05-05T03:23:03.760169Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:03.760186Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T03:23:03.760260Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:03.760267Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2025-05-05T03:23:03.760392Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:03.760401Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2025-05-05T03:23:03.760558Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-05T03:23:03.760571Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-05T03:23:03.760576Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-05-05T03:23:03.760581Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-05-05T03:23:03.760586Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-05T03:23:03.760604Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:7524 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 949FC0FC-508D-428B-9413-CD1713EEEDE1 amz-sdk-request: attempt=1 content-length: 137 content-md5: WeIr3D5bqIjvqMGEjx2JrA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 137 2025-05-05T03:23:03.762383Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:7524 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 30EBC74D-583C-4249-8898-62CDBA57FF54 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:7524 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 69E08647-C887-41F9-A916-183436619788 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2025-05-05T03:23:03.766191Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 452 RawX2: 17179871605 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T03:23:03.766232Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-05-05T03:23:03.766261Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 452 RawX2: 17179871605 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T03:23:03.766276Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 452 RawX2: 17179871605 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T03:23:03.766292Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:03.766297Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:03.766301Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-05T03:23:03.766310Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-05-05T03:23:03.766352Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:03.766891Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:03.766992Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:03.767002Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-05-05T03:23:03.767016Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T03:23:03.767020Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T03:23:03.767025Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T03:23:03.767028Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T03:23:03.767033Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-05-05T03:23:03.767048Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:124:2150] message: TxId: 281474976710759 2025-05-05T03:23:03.767055Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T03:23:03.767060Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-05-05T03:23:03.767064Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-05-05T03:23:03.767090Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-05T03:23:03.767579Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-05-05T03:23:03.767593Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-05-05T03:23:03.768128Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-05T03:23:03.768141Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:481:2442] TestWaitNotification: OK eventTxId 103 |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client4-year String NOT NULL-True] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitWriteSplit [GOOD] Test command err: 2025-05-05T03:23:03.272103Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796959170042746:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:23:03.272148Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000326/r3tmp/tmp5M02RM/pdisk_1.dat 2025-05-05T03:23:03.335067Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28177 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T03:23:03.374255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:03.374293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:23:03.375382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:23:03.405303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:03.414570Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:03.421268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:03.453722Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T03:23:03.454708Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T03:23:03.462660Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T03:23:03.463835Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-05-05T03:23:03.472750Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 8r (max 9), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746415383482 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
2025-05-05T03:23:03.495619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } } } TxId: 281474976715680 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-05T03:23:03.495681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976715680:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2025-05-05T03:23:03.495752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T03:23:03.495756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-05T03:23:03.495758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-05-05T03:23:03.495801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-05-05T03:23:03.495839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976715680:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000d\000\000\000" ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000d\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\310\000\000\000" ShardIdx: 4 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000\310\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" ShardIdx: 5 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2025-05-05T03:23:03.495844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715680:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-05T03:23:03.496037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715680, response: Status: StatusAccepted TxId: 281474976715680 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-05-05T03:23:03.496047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715680, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-05-05T03:23:03.496075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715680:0, at schemeshard: 72057594046644480 2025-05-05T03:23:03.496079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715680:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 
2025-05-05T03:23:03.496130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T03:23:03.496139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T03:23:03.496146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 5 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T03:23:03.496200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-05-05T03:23:03.496217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-05-05T03:23:03.496221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:5 msg type: 268697601 2025-05-05T03:23:03.496229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715680, partId: 0, tablet: 72057594037968897 2025-05-05T03:23:03.496232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976715680, shardIdx: 72057594046644480:3, partId: 0 2025-05-05T03:23:03.496234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976715680, shardIdx: 72057594046644480:4, partId: 0 2025-05-05T03:23:03.496234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976715680, shardIdx: 72057594046644480:5, partId: 0 2025-05-05T03:23:03.496892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715680, at schemeshard: 72057594046644480 2025-05-05T03:23:03.496901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715680, ready parts: 0/1, is published: true 2025-05-05T03:23:03.496905Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715680, at schemeshard: 72057594046644480 2025-05-05T03:23:03.497113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle T ... 
9 2025-05-05T03:23:03.972114Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715694:0 progress is 1/1 2025-05-05T03:23:03.972117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715694 ready parts: 1/1 2025-05-05T03:23:03.972120Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715694:0 progress is 1/1 2025-05-05T03:23:03.972121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715694 ready parts: 1/1 2025-05-05T03:23:03.972125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715694, ready parts: 1/1, is published: true 2025-05-05T03:23:03.972137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7500796955499955395:2423] message: TxId: 281474976715694 2025-05-05T03:23:03.972139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715694 ready parts: 1/1 2025-05-05T03:23:03.972143Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715694:0 2025-05-05T03:23:03.972145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715694:0 2025-05-05T03:23:03.972181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 8 TClient::Ls request: /dc-1/Dir/TableOld 2025-05-05T03:23:03.972885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796955499955040 RawX2: 4503608217307455 } TabletId: 72075186224037892 State: 4 2025-05-05T03:23:03.972908Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:23:03.972993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-05-05T03:23:03.973104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796955499955243 RawX2: 4503608217307482 } TabletId: 72075186224037894 State: 4 2025-05-05T03:23:03.973118Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:23:03.973173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:23:03.973448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796955499955039 RawX2: 4503608217307454 } TabletId: 72075186224037890 State: 4 2025-05-05T03:23:03.973460Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:23:03.973493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796955499954699 RawX2: 4503608217307386 } TabletId: 72075186224037888 State: 4 2025-05-05T03:23:03.973499Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard 
informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:23:03.973546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:23:03.973559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:23:03.973790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796955499955044 RawX2: 4503608217307456 } TabletId: 72075186224037891 State: 4 2025-05-05T03:23:03.973817Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:23:03.973897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:23:03.977153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-05-05T03:23:03.977259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-05-05T03:23:03.977337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-05-05T03:23:03.977359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-05-05T03:23:03.977376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T03:23:03.977393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-05-05T03:23:03.977408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-05T03:23:03.977438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-05T03:23:03.977457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-05T03:23:03.977476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T03:23:03.977578Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-05-05T03:23:03.977582Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-05-05T03:23:03.977584Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-05-05T03:23:03.977587Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus 
from node 2, TabletId: 72075186224037890 not found 2025-05-05T03:23:03.977731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-05-05T03:23:03.977737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-05-05T03:23:03.977747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-05-05T03:23:03.977749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-05-05T03:23:03.977753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T03:23:03.977755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T03:23:03.977778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-05-05T03:23:03.977779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-05T03:23:03.977783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-05T03:23:03.977786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-05T03:23:03.983670Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-05T03:23:03.985479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796955499954702 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-05-05T03:23:03.985515Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:23:03.985687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:23:03.985723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500796955499955246 RawX2: 4503608217307483 } TabletId: 72075186224037893 State: 4 2025-05-05T03:23:03.985727Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:23:03.985806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:23:03.986611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-05T03:23:03.986708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T03:23:03.986766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-05-05T03:23:03.986785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T03:23:03.986806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 
2025-05-05T03:23:03.986809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T03:23:03.986824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T03:23:03.987100Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-05T03:23:03.987102Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2025-05-05T03:23:03.987422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T03:23:03.987427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-05T03:23:03.987438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-05-05T03:23:03.987441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-05-05T03:23:03.987448Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client5-year String-False] >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings1-client0] [GOOD] >> test_s3_1.py::TestS3::test_simple_hits_47[v2-false-client0] [GOOD] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings1-client0] >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] |96.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_s3_1.py::TestS3::test_simple_hits_47[v2-true-client0] >> test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v1-client0] [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_formats.py::TestS3Formats::test_simple_pg_types[v2-client0] |96.8%| [TA] $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} >> TExportToS3Tests::ShouldPreserveIncrBackupFlag >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-false] [GOOD] |96.8%| [TA] {RESULT} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-true] >> test_insert.py::TestS3::test_part_binding[v1-client0-json_each_row] [GOOD] >> test_insert.py::TestS3::test_part_binding[v1-client0-csv_with_names] >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> TLocksTest::BrokenSameKeyLock [GOOD] >> TLocksTest::BrokenSameShardLock >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-result_sets] [GOOD] >> TExportToS3Tests::ShouldRestartOnScanErrors >> TObjectStorageListingTest::Split >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client10-year Utf8-False] [GOOD] >> TExportToS3Tests::CheckItemProgress >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client11-year Utf8 NOT NULL-True] >> TObjectStorageListingTest::Split [GOOD] >> TObjectStorageListingTest::SuffixColumns >> test_s3_0.py::TestS3::test_limit[v2-false-kikimr_params0-client0] [GOOD] >> test_s3_0.py::TestS3::test_limit[v2-true-kikimr_params0-client0] >> test_formats.py::TestS3Formats::test_simple_pg_types[v2-client0] [GOOD] >> test_formats.py::TestS3Formats::test_simple_pg_types[v1-client0] >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] [GOOD] |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-true] [GOOD] >> TObjectStorageListingTest::SuffixColumns [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet] >> test_s3_1.py::TestS3::test_simple_hits_47[v2-true-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-false] >> test_s3_1.py::TestS3::test_simple_hits_47[v1-false-client0] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client5-year String-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client6-year Utf8 NOT NULL-True] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-csv_with_names] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-parquet] >> TExportToS3Tests::CheckItemProgress [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SuffixColumns [GOOD] Test command err: 2025-05-05T03:23:06.208921Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796968270399367:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:23:06.208943Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/177e/00042d/r3tmp/tmp3cWcqP/pdisk_1.dat 2025-05-05T03:23:06.281376Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18156, node 1 2025-05-05T03:23:06.302416Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T03:23:06.302427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T03:23:06.302429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T03:23:06.302466Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-05T03:23:06.310963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:06.310990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:23:06.312034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21201 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:23:06.353129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-05T03:23:06.361659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746415386422 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... (TRUNCATED) waiting... 
TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746415386422 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... (TRUNCATED) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/00042d/r3tmp/tmpDYI3Li/pdisk_1.dat 2025-05-05T03:23:06.680194Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:23:06.688388Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26479, node 2 2025-05-05T03:23:06.698310Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T03:23:06.698323Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T03:23:06.698325Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T03:23:06.698370Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1639 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:23:06.776866Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:06.776894Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:23:06.777286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:06.777910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-05T03:23:06.787780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:06.917544Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553163, Sender [2:7500796968974694183:2483], Recipient [2:7500796968974693529:2311]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\002\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3\010\000\000\000B\000\000\000\000\000\000\000" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 6 MaxKeys: 10 2025-05-05T03:23:06.917558Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-05-05T03:23:06.917585Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3") (type:4, value:"B\0\0\0\0\0\0\0")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-05-05T03:23:06.917648Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 77, String : ) 2025-05-05T03:23:06.917661Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 88, String : ) 2025-05-05T03:23:06.917672Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 666, String : ) 2025-05-05T03:23:06.917677Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, String : ) 2025-05-05T03:23:06.917685Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, String : ) 2025-05-05T03:23:06.917700Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 5 common prefixes: 0 2025-05-05T03:23:06.919953Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553163, Sender [2:7500796968974694187:2484], Recipient [2:7500796968974693529:2311]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\001\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 5 MaxKeys: 10 2025-05-05T03:23:06.919964Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-05-05T03:23:06.919988Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 
2025-05-05T03:23:06.920021Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, Uint64 : 10) 2025-05-05T03:23:06.920031Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, Uint64 : 10) 2025-05-05T03:23:06.920045Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 0 >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables >> TExportToS3Tests::Checksums >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client11-year Utf8 NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client12-year Date-False] |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_s3_1.py::TestS3::test_simple_hits_47[v1-false-client0] [GOOD] >> TExportToS3Tests::Checksums [GOOD] >> test_s3_1.py::TestS3::test_simple_hits_47[v1-true-client0] >> test_insert.py::TestS3::test_part_binding[v1-client0-csv_with_names] [GOOD] >> TExportToS3Tests::ChecksumsWithCompression >> test_formats.py::TestS3Formats::test_simple_pg_types[v1-client0] [GOOD] >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> test_insert.py::TestS3::test_part_binding[v2-client0-json_list] [SKIPPED] >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> test_formats.py::TestS3Formats::test_precompute[v2-client0] >> test_insert.py::TestS3::test_part_binding[v2-client0-json_each_row] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> TLocksTest::BrokenSameShardLock [GOOD] >> TExportToS3Tests::AuditCompletedExport >> TExportToS3Tests::ChecksumsWithCompression [GOOD] >> TFlatTest::LargeDatashardReplyRW [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> TExportToS3Tests::Changefeeds >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client6-year Utf8 NOT NULL-True] [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client7-year Utf8-False] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-false] [GOOD] >> test_ydb_backup.py::TestRecursiveConsistent::test_recursive_table_backup_from_different_places >> test_s3_1.py::TestS3::test_simple_hits_47[v1-true-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-true] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-True-client0] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::BrokenSameShardLock [GOOD] Test command err: 2025-05-05T03:23:03.412791Z node 1 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796958721889228:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:23:03.451814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000436/r3tmp/tmpf4iUJ4/pdisk_1.dat 2025-05-05T03:23:03.483028Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24953 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:23:03.551890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:03.551916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:23:03.552988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:23:03.553573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:23:03.560749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:03.580834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:03.590641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:23:03.867894Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796957838769749:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000436/r3tmp/tmpZqmAB8/pdisk_1.dat 2025-05-05T03:23:03.870089Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:23:03.882552Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3378 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:23:03.903654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:03.906325Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:03.910311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:03.969064Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:03.969091Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:23:03.970265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:23:04.080844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-05T03:23:04.093319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000436/r3tmp/tmpL5Un1E/pdisk_1.dat 2025-05-05T03:23:04.374361Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T03:23:04.375249Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14277 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... waiting... 2025-05-05T03:23:04.458217Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:04.458247Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:23:04.458590Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:23:04.459271Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:23:04.472665Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:04.498834Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:04.514390Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T03:23:04.846333Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500796963132097425:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:23:04.846350Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000436/r3tmp/tmph9Bb7J/pdisk_1.dat 2025-05-05T03:23:04.866349Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27013 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:23:04.954549Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:04.954591Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:23:04.954880Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:23:04.955859Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:23:04.956286Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:04.959899Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo ... -1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:23:06.669452Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:06.669497Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:23:06.669981Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:06.670515Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:23:06.683093Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:23:06.699661Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:06.715529Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:07.062660Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500796973609745729:2206];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:23:07.064165Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000436/r3tmp/tmpmFAU23/pdisk_1.dat 2025-05-05T03:23:07.076627Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3781 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:23:07.165611Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:07.165646Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:23:07.166138Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T03:23:07.166856Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:23:07.174728Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:07.184369Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T03:23:07.185783Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:07.197511Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:23:07.208206Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:07.571763Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500796974221271304:2137];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000436/r3tmp/tmpJi5qb0/pdisk_1.dat 2025-05-05T03:23:07.577269Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:23:07.585209Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13761 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:23:07.675491Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:07.675524Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:23:07.675944Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:07.676707Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:23:07.683277Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:07.697923Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:07.712088Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:23:08.084052Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500796979114343784:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:23:08.084068Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000436/r3tmp/tmpYXK0V4/pdisk_1.dat 2025-05-05T03:23:08.097445Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2627 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:23:08.188055Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:08.188090Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:23:08.188520Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:08.189040Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T03:23:08.194506Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:08.209689Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:08.223503Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> test.py::test[aggregation] >> TExportToS3Tests::Changefeeds [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReplyRW [GOOD] Test command err: 2025-05-05T03:23:00.304924Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796946326647252:2200];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:23:00.305122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000440/r3tmp/tmpR8CZRt/pdisk_1.dat 2025-05-05T03:23:00.367321Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19777 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: 2025-05-05T03:23:00.405560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:00.405586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:23:00.406630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:23:00.407101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:23:00.417552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:04.153963Z node 1 :MINIKQL_ENGINE ERROR: Shard %72075186224037889, txid %281474976711360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2025-05-05T03:23:04.159338Z node 1 :TX_DATASHARD ERROR: Datashard execution error for [1746415383839:281474976711360] at 72075186224037889: Datashard 72075186224037889: reply size limit exceeded. 
(61442990 > 50331648) 2025-05-05T03:23:04.161172Z node 1 :TX_PROXY ERROR: Actor# [1:7500796959211555405:5907] txid# 281474976711360 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# RESULT_UNAVAILABLE shard id 72075186224037889 marker# P12 2025-05-05T03:23:04.161206Z node 1 :TX_PROXY ERROR: Actor# [1:7500796959211555405:5907] txid# 281474976711360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) proxy error code: ExecResultUnavailable 2025-05-05T03:23:04.506359Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500796962182972527:2086];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:23:04.506671Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000440/r3tmp/tmpWDGsJ3/pdisk_1.dat 2025-05-05T03:23:04.534642Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24241 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T03:23:04.606528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:04.606562Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:23:04.610704Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:23:04.610934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:04.614658Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T03:23:04.620668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:23:08.562281Z node 2 :MINIKQL_ENGINE ERROR: Shard %72075186224037888, txid %281474976716361, engine error: Error executing transaction (read-only: 0): Datashard 72075186224037888: reply size limit exceeded. 
(71580986 > 50331648) 2025-05-05T03:23:08.567637Z node 2 :TX_DATASHARD ERROR: Datashard execution error for [0:281474976716361] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-05-05T03:23:08.570298Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976716361 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-05-05T03:23:08.570400Z node 2 :TX_PROXY ERROR: Actor# [2:7500796979362848039:5889] txid# 281474976716361 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed >> test_formats.py::TestS3Formats::test_precompute[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client12-year Date-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client13-year Date NOT NULL-True] >> test_formats.py::TestS3Formats::test_precompute[v1-client0] |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] >> test_s3_0.py::TestS3::test_limit[v2-true-kikimr_params0-client0] [GOOD] >> test_s3_0.py::TestS3::test_limit[v1-false-kikimr_params0-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::Changefeeds [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:23:06.714412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:23:06.714437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:23:06.714442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:23:06.714447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:23:06.714457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:23:06.714461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:23:06.714472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:23:06.714486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:23:06.714583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:23:06.714654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:23:06.727395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:23:06.727419Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:23:06.731405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:23:06.731815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:23:06.731910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:23:06.733604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:23:06.733672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:23:06.733773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:06.734217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:23:06.735474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:06.735802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:06.735814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:06.735832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:23:06.735839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:06.735845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:23:06.735882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:23:06.737306Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:23:06.755843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:23:06.755916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:06.756006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:23:06.756060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:23:06.756072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:06.756985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:06.757036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:23:06.757092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:06.757103Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:23:06.757109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:23:06.757115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:23:06.758133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:06.758152Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:23:06.758158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:23:06.758686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:06.758701Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:06.758707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:06.758715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:23:06.759554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:23:06.760042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:23:06.760102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:23:06.760301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:06.760327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:23:06.760334Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:06.760403Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:23:06.760415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:06.760448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:23:06.760460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:23:06.761062Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:06.761073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:06.761123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:06.761141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:23:06.761217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:06.761225Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:23:06.761237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:23:06.761242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:06.761246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:23:06.761250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:06.761254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:23:06.761259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:06.761264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:23:06.761267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:23:06.761282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:23:06.761288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:23:06.761293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:23:06.761634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:23:06.761653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
7594046678944 2025-05-05T03:23:09.551353Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T03:23:09.551360Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:23:09.551390Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:23:09.551511Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:09.551523Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:09.551527Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:23:09.551532Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-05T03:23:09.551537Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:23:09.551739Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:09.551753Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:09.551757Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:23:09.551761Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 7 2025-05-05T03:23:09.551765Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-05-05T03:23:09.551776Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:23:09.552399Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:23:09.552427Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:23:09.552432Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T03:23:09.552440Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:23:09.552973Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T03:23:09.553007Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-05-05T03:23:09.553050Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000010 2025-05-05T03:23:09.553346Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:09.553388Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:23:09.553397Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000010, at schemeshard: 72057594046678944 2025-05-05T03:23:09.553425Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T03:23:09.553434Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:23:09.553438Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:23:09.553443Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T03:23:09.553446Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:23:09.553454Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:23:09.553464Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-05-05T03:23:09.553469Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T03:23:09.553476Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T03:23:09.553493Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T03:23:09.553497Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T03:23:09.553510Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-05-05T03:23:09.553515Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T03:23:09.553520Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2025-05-05T03:23:09.553523Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 9], 18446744073709551615 2025-05-05T03:23:09.553665Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710761 2025-05-05T03:23:09.554158Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:09.554172Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:09.554251Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 9] 2025-05-05T03:23:09.554276Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:09.554281Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T03:23:09.554286Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 9 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T03:23:09.554426Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:09.554436Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:09.554441Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:23:09.554445Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-05-05T03:23:09.554450Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T03:23:09.554517Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:09.554525Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:09.554528Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:23:09.554531Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-05-05T03:23:09.554535Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-05-05T03:23:09.554542Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T03:23:09.554546Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:124:2150] 2025-05-05T03:23:09.555438Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:09.555527Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:09.555544Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T03:23:09.555554Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T03:23:09.555579Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:23:09.555584Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T03:23:09.555589Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2025-05-05T03:23:09.556074Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:23:09.556098Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-05T03:23:09.556115Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [4:1387:3177] TestWaitNotification: OK eventTxId 105 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:23:07.492684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:23:07.492712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:23:07.492717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:23:07.492722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:23:07.492732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:23:07.492736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:23:07.492746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:23:07.492760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:23:07.492861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:23:07.492937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-05-05T03:23:07.505008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:23:07.505031Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:23:07.508821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:23:07.509146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:23:07.509226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:23:07.512079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:23:07.512153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:23:07.512244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:07.512615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:23:07.513717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:07.514054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:07.514067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:07.514086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:23:07.514093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:07.514099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:23:07.514133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:23:07.515646Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:23:07.533785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:23:07.533869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:07.533965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:23:07.534031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:23:07.534045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:07.535046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:07.535082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:23:07.535139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:07.535152Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:23:07.535158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:23:07.535164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:23:07.535818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:07.535835Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:23:07.535841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:23:07.536360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:07.536373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:07.536379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:07.536386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:23:07.537055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:23:07.537657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:23:07.537720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:23:07.537908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:07.537939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:23:07.537948Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:07.538025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:23:07.538033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:07.538068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-05-05T03:23:07.538081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:23:07.538602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:07.538610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:07.538656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:07.538661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:23:07.538733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:07.538741Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:23:07.538753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:23:07.538757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:07.538762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:23:07.538765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:07.538769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:23:07.538774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:07.538778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:23:07.538782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:23:07.538794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:23:07.538800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:23:07.538804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:23:07.539134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:23:07.539161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
05-05T03:23:09.592955Z node 4 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-05-05T03:23:09.592997Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 281474976710765:0, left await: 0, at schemeshard: 72057594046678944 2025-05-05T03:23:09.593005Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 3 -> 128 2025-05-05T03:23:09.593822Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-05-05T03:23:09.593890Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-05-05T03:23:09.593900Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:23:09.593915Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710765 ready parts: 1/1 2025-05-05T03:23:09.593956Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976710765 MinStep: 5000010 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:23:09.594742Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710765:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710765 msg type: 269090816 2025-05-05T03:23:09.594793Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710765, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710765 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710765 at step: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 281474976710765 at step: 5000010 2025-05-05T03:23:09.595033Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:09.595068Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710765 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:23:09.595078Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 HandleReply TEvOperationPlan, stepId: 5000010, at schemeshard: 72057594046678944 2025-05-05T03:23:09.595117Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 128 -> 129 2025-05-05T03:23:09.595156Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 REQUEST: PUT /Backup2/metadata.json HTTP/1.1 HEADERS: Host: localhost:21100 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0DC2DCD5-B6AF-4FE7-9DA1-CD06AFE69C51 amz-sdk-request: attempt=1 content-length: 73 content-md5: 5UnTthDw7DG9u0TfCJZu+w== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000010 2025-05-05T03:23:09.600745Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:09.600764Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710765, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-05-05T03:23:09.600856Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:09.600864Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710765, path id: 7 2025-05-05T03:23:09.600977Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-05-05T03:23:09.601005Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710765 2025-05-05T03:23:09.601168Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-05-05T03:23:09.601203Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-05-05T03:23:09.601209Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710765 2025-05-05T03:23:09.601214Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-05-05T03:23:09.601220Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-05-05T03:23:09.601239Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true REQUEST: PUT /Backup2/scheme.pb HTTP/1.1 HEADERS: Host: localhost:21100 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 457D6EAA-89CA-4243-B9D5-A2692F796892 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/scheme.pb / / 355 2025-05-05T03:23:09.602319Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 REQUEST: PUT /Backup2/data_00.csv HTTP/1.1 HEADERS: Host: localhost:21100 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C8DA20D3-8434-4B89-9E71-548B7A92B29F amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/data_00.csv / / 0 2025-05-05T03:23:09.616062Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 812 RawX2: 17179871931 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T03:23:09.616095Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710765, tablet: 72075186233409549, partId: 0 2025-05-05T03:23:09.616127Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944, message: Source { RawX1: 812 RawX2: 17179871931 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T03:23:09.616143Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 812 RawX2: 17179871931 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T03:23:09.616159Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710765:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:09.616164Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-05-05T03:23:09.616170Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710765:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-05-05T03:23:09.616179Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 129 -> 240 2025-05-05T03:23:09.616235Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710765:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:09.617105Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-05-05T03:23:09.617244Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-05-05T03:23:09.617254Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710765:0 ProgressState 2025-05-05T03:23:09.617272Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:0 progress is 1/1 2025-05-05T03:23:09.617278Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-05-05T03:23:09.617284Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:0 progress is 1/1 2025-05-05T03:23:09.617288Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-05-05T03:23:09.617293Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 1/1, is published: true 2025-05-05T03:23:09.617315Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:124:2150] message: TxId: 281474976710765 2025-05-05T03:23:09.617323Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-05-05T03:23:09.617328Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:0 2025-05-05T03:23:09.617333Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710765:0 2025-05-05T03:23:09.617366Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-05-05T03:23:09.617971Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2025-05-05T03:23:09.617989Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710765 2025-05-05T03:23:09.618649Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-05T03:23:09.618677Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [4:842:2769] TestWaitNotification: OK eventTxId 104 >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-True-client0] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-parquet] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client13-year Date NOT NULL-True] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-False-client0] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_list] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client14-year Datetime-False] >> test_insert.py::TestS3::test_part_binding[v2-client0-json_each_row] [GOOD] >> test_insert.py::TestS3::test_part_binding[v2-client0-csv_with_names] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-true] [GOOD] >> test.py::test[aggregation] [GOOD] >> test.py::test[cardinality] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-false] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client7-year Utf8-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client8-year Int32-False] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names] |96.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-plan] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] |96.9%| [TA] $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |96.9%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... 
results_accumulator.log} >> generator::import_test [GOOD] >> test_formats.py::TestS3Formats::test_precompute[v1-client0] [GOOD] |96.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/docs/generator/import_test >> generator::import_test [GOOD] |96.9%| [TS] {RESULT} ydb/tests/olap/docs/generator/import_test >> test.py::test[cardinality] [GOOD] >> test.py::test[intersect] >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-false] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-true] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-False-client0] [GOOD] >> test.py::test[action-eval_if-default.txt] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_ydb_backup.py::TestRecursiveConsistent::test_recursive_table_backup_from_different_places [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-True-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client8-year Int32-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client14-year Datetime-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client15-year Datetime NOT NULL-True] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client9-year Uint32-False] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-true] [GOOD] >> test_insert.py::TestS3::test_part_binding[v2-client0-csv_with_names] [GOOD] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings1-client0] [SKIPPED] >> test_insert.py::TestS3::test_error[v1-client0-json_each_row] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings1-client0] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings1-client0] [SKIPPED] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v1-client0] >> TExportToS3Tests::CancelledExportEndTime >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v1[v1-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client15-year Datetime NOT NULL-True] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_list] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client0-year Int32-False] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_each_row] >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v2-client0] [GOOD] >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v1-client0] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-False-client0] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v2-client0] [GOOD] >> 
test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-8.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-9.test] >> TExportToS3Tests::CancelledExportEndTime [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v1-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row] [GOOD] >> test.py::test[action-eval_if-default.txt] [GOOD] >> test.py::test[action-evaluate_queries-] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v1-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client9-year Uint32-False] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client10-year Int64 NOT NULL-True] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-False-client0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelledExportEndTime [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:125:2058] recipient: [1:109:2141] 2025-05-05T03:23:01.130999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:23:01.131026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:23:01.131032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:23:01.131037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:23:01.131049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:23:01.131053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:23:01.131062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:23:01.131076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:23:01.131170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:23:01.131246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:23:01.144749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:23:01.144774Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:23:01.147541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-05-05T03:23:01.147583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:23:01.147635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:23:01.150919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:23:01.151038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:23:01.151157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:01.151233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:23:01.152648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:01.153241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:01.153261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:01.153357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:23:01.153368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:01.153376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:23:01.153417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:23:01.155304Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-05T03:23:01.184334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:23:01.184417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:01.184513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:23:01.184569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:23:01.184582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:01.185640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:01.185715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:23:01.185791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:01.185802Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:23:01.185807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:23:01.185813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:23:01.186414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:01.186429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:23:01.186434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:23:01.186901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:01.186915Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:01.186921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:01.186928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:23:01.187568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:23:01.188056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:23:01.188099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:23:01.188315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:01.188341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:23:01.188351Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:01.188423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:23:01.188431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:01.188461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:23:01.188473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:23:01.189021Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:01.189030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:01.189081Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:01.189087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:23:01.189159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:01.189166Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:23:01.189178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:23:01.189183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:01.189188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:23:01.189191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:01.189195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:23:01.189200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:01.189204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:23:01.189208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:23:01.189220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:23:01.189226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:23:01.189230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:23:01.189506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:23:01.189517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
44] TDone opId# 281474976710758:0 ProgressState 2025-05-05T03:23:15.078012Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T03:23:15.078017Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T03:23:15.078021Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T03:23:15.078027Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T03:23:15.078032Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: true 2025-05-05T03:23:15.078049Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:124:2150] message: TxId: 281474976710758 2025-05-05T03:23:15.078055Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T03:23:15.078061Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-05-05T03:23:15.078065Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-05-05T03:23:15.078094Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-05T03:23:15.078098Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T03:23:15.078847Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-05-05T03:23:15.078868Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-05-05T03:23:15.079313Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2025-05-05T03:23:15.092438Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-05T03:23:15.092460Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-05T03:23:15.093272Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/export-102" OperationType: ESchemeOpBackup Backup { TableName: "0" NumberOfRetries: 0 S3Settings { Endpoint: "localhost:3400" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" StorageClass: STORAGE_CLASS_UNSPECIFIED UseVirtualAddressing: true } Table { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 
InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 EnableChecksums: false EnablePermissions: false } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:23:15.093358Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TBackup Propose, path: /MyRoot/export-102/0, opId: 
281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:15.093394Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T03:23:15.093517Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:23:15.093526Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:15.093811Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2025-05-05T03:23:15.093823Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-05-05T03:23:15.094363Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:23:15.094434Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0 2025-05-05T03:23:15.094496Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted 2025-05-05T03:23:15.094506Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944 2025-05-05T03:23:15.094582Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:15.094593Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710759:0 ProgressState, operation type: TxBackup, at tablet# 72057594046678944 2025-05-05T03:23:15.094601Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state 2025-05-05T03:23:15.094605Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 2 -> 3 2025-05-05T03:23:15.095455Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102 2025-05-05T03:23:15.095471Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0 2025-05-05T03:23:15.095618Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:15.095627Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:15.095652Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-05-05T03:23:15.096458Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2025-05-05T03:23:15.096506Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:15.096512Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:15.096531Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-05-05T03:23:15.096642Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944 2025-05-05T03:23:15.096661Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102 2025-05-05T03:23:15.096869Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-05-05T03:23:15.096907Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547 2025-05-05T03:23:15.097447Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-05-05T03:23:15.097854Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:23:15.097868Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:556:2515] TestWaitNotification: OK eventTxId 102 >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-True-client0] >> test_s3_0.py::TestS3::test_limit[v1-false-kikimr_params0-client0] [GOOD] >> test_s3_0.py::TestS3::test_limit[v1-true-kikimr_params0-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client0-year Int32-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client1-year Int32 NOT NULL-False] >> test.py::test[intersect] [GOOD] >> test.py::test[run_optimize] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-True-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client10-year Int64 NOT NULL-True] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-False-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client11-year Int64-False] >> test_insert.py::TestS3::test_error[v1-client0-json_each_row] [GOOD] >> test_insert.py::TestS3::test_error[v1-client0-csv_with_names] >> test.py::test[run_optimize] [GOOD] >> test.py::test[serialize_deserialize] >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_NONE-client0] >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v1-client0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] [GOOD] >> test.py::test[action-evaluate_queries-] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt] [SKIPPED] >> test.py::test[aggregate-GroupByTwoFields-] >> test_formats.py::TestS3Formats::test_with_infer_and_unsupported_option[v2-client0] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.csv-csv_with_names] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_each_row] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-csv_with_names] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-True-client0] >> 
test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client1-year Int32 NOT NULL-False] [GOOD] >> test.py::test[aggregate-GroupByTwoFields-] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client2-year Uint32-False] >> ydb-tests-olap-s3_import::import_test [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.tsv-tsv_with_names] |96.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/import_test >> ydb-tests-olap-s3_import::import_test [GOOD] |96.9%| [TS] {RESULT} ydb/tests/olap/s3_import/import_test |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-result_sets] [GOOD] >> test.py::test[serialize_deserialize] [GOOD] >> test.py::test[union] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-False-client0] >> ydb-tests-tools-pq_read-test::import_test [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client11-year Int64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client12-year Uint64-False] |96.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] >> test_formats.py::TestS3Formats::test_with_infer_and_unsupported_option[v2-client0] [GOOD] |96.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/pq_read/test/import_test >> ydb-tests-tools-pq_read-test::import_test [GOOD] |96.9%| [TS] {RESULT} ydb/tests/tools/pq_read/test/import_test >> functional-sqs-merge_split_common_table-std::import_test [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v2-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v1-client0] >> test_insert.py::TestS3::test_error[v1-client0-csv_with_names] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt] [GOOD] >> test.py::test[aggregate-group_by_session_extended_subset-] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_s3_0.py::TestS3::test_limit[v1-true-kikimr_params0-client0] [GOOD] >> test_s3_0.py::TestS3::test_bad_format[v2-false-client0] >> test_insert.py::TestS3::test_error[v1-client0-tsv_with_names] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.json-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client2-year Uint32-False] [GOOD] |96.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/import_test >> functional-sqs-merge_split_common_table-std::import_test [GOOD] |97.0%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/import_test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client3-year Uint32 NOT NULL-True] >> 
test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-csv_with_names] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-parquet] >> test.py::test[union] [GOOD] >> ydb-tests-functional-sqs-cloud::import_test [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-True-client0] |97.0%| [TA] $(B)/ydb/tests/functional/canonical/test-results/py3test/{meta.json ... results_accumulator.log} |97.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/import_test >> ydb-tests-functional-sqs-cloud::import_test [GOOD] |97.0%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/import_test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client12-year Uint64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client13-year Date-False] |97.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/udfs/common/roaring/test/py3test >> test.py::test[union] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.parquet-parquet] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client3-year Uint32 NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client4-year Int64-False] |97.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] |97.0%| [TM] {RESULT} ydb/library/yql/udfs/common/roaring/test/py3test >> ydb-tests-functional-restarts::import_test [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-False-client0] >> ydb-tests-olap::import_test [GOOD] |97.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/import_test >> ydb-tests-functional-restarts::import_test [GOOD] >> test.py::test[aggregate-group_by_session_extended_subset-] [GOOD] >> test.py::test[aggregate-group_compact_sorted-] |97.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/import_test >> ydb-tests-olap::import_test [GOOD] |97.0%| [TS] {RESULT} ydb/tests/functional/restarts/import_test |97.0%| [TS] {RESULT} ydb/tests/olap/import_test |97.0%| [TA] {RESULT} $(B)/ydb/tests/functional/canonical/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-False-client0] [GOOD] >> test_insert.py::TestS3::test_error[v1-client0-tsv_with_names] [GOOD] >> TFlatTest::SplitEmptyToMany [GOOD] >> TFlatTest::SplitEmptyTwice >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-True-client0] >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] [FAIL] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.parquet-parquet] [GOOD] >> test_insert.py::TestS3::test_error[v1-client0-parquet] >> test_ctas.py::TestYtCtas::test_simple_ctast >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v1-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.csv-csv_with_names] >> test_insert.py::TestS3::test_error[v1-client0-parquet] [SKIPPED] >> test_insert.py::TestS3::test_insert_empty_object[v1] >> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v2-client0] >> test_tenants.py::TestTenants::test_when_deactivate_fat_tenant_creation_another_tenant_is_ok[enable_alter_database_create_hive_first--false] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client13-year Date-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client0-year Int32 NOT NULL-True] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client4-year Int64-False] [GOOD] >> TFlatTest::SplitEmptyTwice [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-parquet] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_list] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client5-year Int64 NOT NULL-False] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.csv-csv_with_names] [GOOD] >> test.py::test[aggregate-group_compact_sorted-] [GOOD] >> test.py::test[aggregate-percentiles_grouped-] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.tsv-tsv_with_names] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client0-year Int32 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client1-year Uint32 NOT NULL-True] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-False-client0] >> BulkUpsert::BulkUpsert >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_NONE-client0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SplitEmptyTwice [GOOD] Test command err: 2025-05-05T03:22:55.811977Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500796921317183678:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:22:55.812005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000b5a/r3tmp/tmpd4K8ex/pdisk_1.dat 2025-05-05T03:22:55.885267Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is 
connected to server localhost:63637 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: 2025-05-05T03:22:55.914096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:22:55.914151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:22:55.915226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T03:22:55.955503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:22:55.973168Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:22:55.976334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:22:56.010738Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T03:22:56.011522Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T03:22:56.020821Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T03:22:56.022605Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746415376041 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746415376041 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
2025-05-05T03:22:56.356684Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.41, eph 1} end=0, 2 blobs 459r (max 459), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (132881 0 0)b }, ecr=1.000 2025-05-05T03:22:56.356779Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.42, eph 1} end=0, 2 blobs 1377r (max 1377), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (90307 0 0)b }, ecr=1.000 2025-05-05T03:22:56.385545Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.56, eph 1} end=0, 2 blobs 2r (max 2), put Spent{time=0.003s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-05-05T03:22:56.385623Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.58, eph 1} end=0, 2 blobs 2r (max 2), put Spent{time=0.003s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-05-05T03:22:56.385668Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.60, eph 1} end=0, 2 blobs 505r (max 505), put Spent{time=0.003s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (32218 0 0)b }, ecr=1.000 2025-05-05T03:22:56.386294Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.61, eph 1} end=0, 2 blobs 1512r (max 1512), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (103886 0 0)b }, ecr=1.000 2025-05-05T03:22:56.386357Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.522, eph 1} end=0, 2 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-05-05T03:22:56.401195Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.559, eph 1} end=0, 2 blobs 10001r (max 10001), put Spent{time=0.015s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-05-05T03:22:56.406361Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.85, eph 2} end=0, 2 blobs 726r (max 727), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (210005 0 0)b }, ecr=1.000 2025-05-05T03:22:56.406636Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.86, eph 2} end=0, 2 blobs 2178r (max 2181), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (142750 0 0)b }, ecr=1.000 2025-05-05T03:22:56.440211Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1032, eph 2} end=0, 2 blobs 3r (max 5), put Spent{time=0.013s,wait=0.013s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-05-05T03:22:56.440295Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.110, eph 2} end=0, 2 blobs 2r (max 3), put Spent{time=0.013s,wait=0.013s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-05-05T03:22:56.440398Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.111, eph 2} end=0, 2 blobs 2r (max 3), put Spent{time=0.013s,wait=0.013s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-05-05T03:22:56.440537Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.104, eph 3} end=0, 2 blobs 984r (max 985), put Spent{time=0.013s,wait=0.006s,interrupts=1} Part{ 1 pk, lobs 0 +0, (284501 0 0)b }, ecr=1.000 2025-05-05T03:22:56.440674Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.112, eph 2} end=0, 2 blobs 3084r (max 3084), put Spent{time=0.013s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (211727 0 0)b }, ecr=1.000 2025-05-05T03:22:56.440793Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.113, eph 2} end=0, 2 blobs 1029r (max 1029), put Spent{time=0.013s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (65480 0 0)b }, ecr=1.000 
2025-05-05T03:22:56.451894Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.105, eph 3} end=0, 2 blobs 2952r (max 2955), put Spent{time=0.025s,wait=0.006s,interrupts=1} Part{ 1 pk, lobs 0 +0, (193492 0 0)b }, ecr=1.000 2025-05-05T03:22:56.455925Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1085, eph 2} end=0, 2 blobs 10001r (max 10520), put Spent{time=0.015s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-05-05T03:22:56.482849Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.137, eph 4} end=0, 2 blobs 1236r (max 1237), put Spent{time=0.013s,wait=0.006s,interrupts=1} Part{ 1 pk, lobs 0 +0, (357320 0 0)b }, ecr=1.000 2025-05-05T03:22:56.484349Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.138, eph 4} end=0, 2 blobs 3708r (max 3711), put Spent{time=0.014s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (243010 0 0)b }, ecr=1.000 2025-05-05T03:22:56.505791Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1539, eph 3} end=0, 2 blobs 3r (max 5), put Spent{time=0.002s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-05-05T03:22:56.509215Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.162, eph 5} end=0, 2 blobs 1487r (max 1488), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (429807 0 0)b }, ecr=1.000 2025-05-05T03:22:56.510396Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.168, eph 3} end=0, 2 blobs 2r (max 3), put Spent{time=0.001s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-05-05T03:22:56.510471Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.169, eph 3} end=0, 2 blobs 2r (max 3), put Spent{time=0.001s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-05-05T03:22:56.514638Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.163, eph 5} end=0, 2 blobs 4461r (max 4464), put Spent{time=0.010s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (292279 0 0)b }, ecr=1.000 2025-05-05T03:22:56.515280Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.170, eph 3} end=0, 2 blobs 1531r (max 1531), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (97306 0 0)b }, ecr=1.000 2025-05-05T03:22:56.517216Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.167, eph 3} end=0, 2 blobs 4590r (max 4590), put Spent{time=0.008s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (315017 0 0)b }, ecr=1.000 2025-05-05T03:22:56.530681Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1638, eph 3} end=0, 2 blobs 10001r (max 10547), put Spent{time=0.015s,wait=0.00 ... 
9 2025-05-05T03:23:23.443238Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715690:0 progress is 1/1 2025-05-05T03:23:23.443240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-05-05T03:23:23.443243Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715690:0 progress is 1/1 2025-05-05T03:23:23.443244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-05-05T03:23:23.443247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715690, ready parts: 1/1, is published: true 2025-05-05T03:23:23.443259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7500797042771339297:2416] message: TxId: 281474976715690 2025-05-05T03:23:23.443261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-05-05T03:23:23.443265Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715690:0 2025-05-05T03:23:23.443267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715690:0 2025-05-05T03:23:23.443305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 8 2025-05-05T03:23:23.443806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500797042771338593 RawX2: 4503608217307386 } TabletId: 72075186224037888 State: 4 2025-05-05T03:23:23.443818Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:23:23.443855Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500797042771338898 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-05-05T03:23:23.443857Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:23:23.443905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 TClient::Ls request: /dc-1/Dir/TableOld 2025-05-05T03:23:23.443938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-05-05T03:23:23.444746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-05T03:23:23.444834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-05-05T03:23:23.444888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T03:23:23.444914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, 
LocalPathId: 3] was 6 2025-05-05T03:23:23.445075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-05-05T03:23:23.445085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-05T03:23:23.445094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T03:23:23.445096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T03:23:23.445494Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-05-05T03:23:23.445498Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-05T03:23:23.446112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500797042771338899 RawX2: 4503608217307446 } TabletId: 72075186224037891 State: 4 2025-05-05T03:23:23.446131Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:23:23.446186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500797042771339085 RawX2: 4503608217307465 } TabletId: 72075186224037894 State: 4 2025-05-05T03:23:23.446227Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:23:23.446255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500797042771338596 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-05-05T03:23:23.446263Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:23:23.446277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500797042771339089 RawX2: 4503608217307466 } TabletId: 72075186224037895 State: 4 2025-05-05T03:23:23.446286Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037895, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:23:23.446308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500797042771339080 RawX2: 4503608217307464 } TabletId: 72075186224037893 State: 4 2025-05-05T03:23:23.446317Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-05-05T03:23:23.446354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:23:23.446375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:23:23.446389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:23:23.446401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 
72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:23:23.446408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-05-05T03:23:23.448228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-05T03:23:23.448303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-05-05T03:23:23.448374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-05-05T03:23:23.448401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-05T03:23:23.448426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-05T03:23:23.448449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T03:23:23.448466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2025-05-05T03:23:23.448482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T03:23:23.448499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-05-05T03:23:23.448515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T03:23:23.448532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T03:23:23.448535Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T03:23:23.448547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T03:23:23.449105Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-05-05T03:23:23.449114Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-05T03:23:23.449117Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037895 not found 2025-05-05T03:23:23.449119Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2025-05-05T03:23:23.449121Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-05T03:23:23.449378Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-05T03:23:23.449391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-05T03:23:23.449402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-05-05T03:23:23.449404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-05-05T03:23:23.449409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T03:23:23.449410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-05T03:23:23.449414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:8 2025-05-05T03:23:23.449416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2025-05-05T03:23:23.449419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-05-05T03:23:23.449422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-05-05T03:23:23.449429Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client1-year Uint32 NOT NULL-True] [GOOD] >> test_insert.py::TestS3::test_insert_empty_object[v1] [GOOD] >> test_ctas.py::TestYtCtas::test_simple_ctast [GOOD] >> test_yt_reading.py::TestYtReading::test_partitioned_reading >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client2-year Uint64 NOT NULL-True] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_insert.py::TestS3::test_insert_empty_object[v2] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.json-json_each_row] >> test_ydb_backup.py::TestSingleBackupRestore::test_single_table_with_data_backup_restore >> test.py::test[aggregate-percentiles_grouped-] [GOOD] >> test.py::test[bigdate-implicit_cast_callable-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client5-year Int64 NOT NULL-False] [GOOD] |97.1%| [TA] $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client6-year Uint64-False] >> ReadUpdateWrite::Load |97.1%| [TA] {RESULT} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v2-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-False-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v1-client0] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-True-client0] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.parquet-parquet] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client2-year Uint64 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client3-year Date NOT NULL-False] >> KqpTpch::Query01 >> test_yt_reading.py::TestYtReading::test_partitioned_reading [GOOD] >> test_yt_reading.py::TestYtReading::test_block_reading >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_list] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_each_row] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-False-client0] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client6-year Uint64-False] [GOOD] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v2-client0] >> test_insert.py::TestS3::test_insert_empty_object[v2] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client7-year Uint64 NOT NULL-False] >> test.py::test[bigdate-implicit_cast_callable-default.txt] [GOOD] >> test_insert.py::TestS3::test_insert_without_format_error[v1-client0] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-True-client0] >> KqpTpch::Query01 [GOOD] >> KqpTpch::Query02 >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.csv-csv_with_names] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-9.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-insert.test] >> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v1-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client3-year Date NOT NULL-False] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v2-client0] >> ydb-tests-functional-serializable::import_test [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client4-year String NOT NULL-True] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-True-client0] [GOOD] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] |97.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] |97.1%| [TS] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/serializable/import_test >> ydb-tests-functional-serializable::import_test [GOOD] |97.1%| [TS] {RESULT} ydb/tests/functional/serializable/import_test >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-False-client0] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v2-client0] [GOOD] >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] |97.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_import/py3test >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] |97.1%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_import/py3test >> KqpTpch::Query02 [GOOD] >> KqpTpch::Query03 >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client7-year Uint64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client8-year String NOT NULL-True] >> ydb-tests-functional-canonical::import_test [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.csv-csv_with_names] [GOOD] >> test_ydb_backup.py::TestSingleBackupRestore::test_single_table_with_data_backup_restore [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-False-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.tsv-tsv_with_names] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-False-client0] >> test_insert.py::TestS3::test_insert_without_format_error[v1-client0] [GOOD] |97.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/import_test >> ydb-tests-functional-canonical::import_test [GOOD] |97.1%| [TS] {RESULT} ydb/tests/functional/canonical/import_test >> test_insert.py::TestS3::test_insert_without_format_error[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client4-year String NOT NULL-True] [GOOD] >> KqpTpch::Query03 [GOOD] >> KqpTpch::Query04 >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client5-year String-False] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt] [GOOD] >> test.py::test[bigdate-table_int_cast-default.txt] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_formats.py::TestS3Formats::test_with_infer_and_unsupported_option[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000766/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_formats/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000766/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_formats/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1400120) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1403723 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_each_row] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-csv_with_names] >> KqpTpch::Query04 [GOOD] >> KqpTpch::Query05 >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client8-year String NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client9-year String-False] |97.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_BASIC-client0] >> KqpTpch::Query05 [GOOD] >> KqpTpch::Query06 >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD] >> test_workload.py::TestYdbKvWorkload::test[row] >> test.py::test[action-eval_drop-] [SKIPPED] >> test.py::test[action-eval_skip_take-] >> test.py::test[action-action_eval_cluster_use-] [SKIPPED] >> KqpTpch::Query06 [GOOD] >> KqpTpch::Query07 >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-insert.test] [GOOD] |97.1%| [TA] $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} >> test.py::test[action-eval_each_input_table-default.txt] [SKIPPED] >> test.py::test[action-eval_if_guard-default.txt] [SKIPPED] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-False-client0] [GOOD] >> test.py::test[bigdate-table_int_cast-default.txt] [GOOD] >> test.py::test[blocks-boolean_ops-] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column-] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-True-client0] |97.2%| [TA] {RESULT} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client5-year String-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client6-year Utf8 NOT NULL-True] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.json-json_each_row] >> ydb-tests-functional-sqs-messaging::import_test [GOOD] >> test_insert.py::TestS3::test_insert_without_format_error[v2-client0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-true] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000777/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000777/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1394561) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1398571 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |97.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/import_test >> ydb-tests-functional-sqs-messaging::import_test [GOOD] |97.2%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/import_test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client9-year String-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client10-year Utf8-False] >> test.py::test[action-table_content_before_from_folder-] [SKIPPED] >> test.py::test[aggr_factory-bitand-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_expr_mul_col-] >> 
test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v2-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v1-client0] >> test.py::test[action-eval_skip_take-] [GOOD] >> test.py::test[action-pending_arg_fail-] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client6-year Utf8 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client7-year Utf8-False] >> test.py::test[action-pending_arg_fail-] [SKIPPED] >> test.py::test[agg_apply-table-] [SKIPPED] >> test.py::test[aggr_factory-histogram-default.txt] [SKIPPED] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt] [SKIPPED] >> test.py::test[aggr_factory-median-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_list_in_key-default.txt] [SKIPPED] >> test.py::test[aggregate-avg_and_sum_float-] >> ydb-tests-stress-olap_workload-tests::import_test [GOOD] >> test.py::test[blocks-boolean_ops-] [GOOD] >> test.py::test[blocks-combine_all_min-] |97.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/import_test >> ydb-tests-stress-olap_workload-tests::import_test [GOOD] |97.2%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/import_test >> KqpTpch::Query07 [GOOD] >> KqpTpch::Query08 >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-True-client0] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-csv_with_names] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-False-client0] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-parquet] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.json-json_each_row] [GOOD] >> ydb-tests-olap-scenario::import_test [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.parquet-parquet] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column-] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg-] |97.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/import_test >> ydb-tests-olap-scenario::import_test [GOOD] |97.2%| [TS] {RESULT} ydb/tests/olap/scenario/import_test >> test.py::test[aggregate-group_by_expr_mul_col-] [GOOD] >> test.py::test[aggregate-group_by_expr_with_join-] >> test.py::test[solomon-BadDownsamplingAggregation-] >> test.py::test[aggregate-avg_and_sum_float-] [GOOD] >> test.py::test[aggregate-avg_with_having-default.txt] >> KqpTpch::Query08 [GOOD] >> KqpTpch::Query09 >> test.py::test[action-dep_world_quote_code-default.txt] [SKIPPED] >> test.py::test[action-eval_for-default.txt] [SKIPPED] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client10-year Utf8-False] [GOOD] >> test.py::test[action-eval_typeof_output_table-] [SKIPPED] >> test.py::test[aggr_factory-avg_if-default.txt] [SKIPPED] >> test.py::test[aggr_factory-booland-default.txt] [SKIPPED] >> test.py::test[aggr_factory-count_if-default.txt] [SKIPPED] >> test.py::test[aggr_factory-min-default.txt] [SKIPPED] >> test.py::test[aggr_factory-min_by-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_expr_alias_on_subexp-] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client11-year Utf8 NOT NULL-True] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client7-year Utf8-False] [GOOD] >> 
TExportToS3Tests::AuditCompletedExport [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client8-year Int32-False] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-False-client0] [GOOD] >> test.py::test[blocks-combine_all_min-] [GOOD] >> test.py::test[blocks-combine_all_minmax_nested-] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v1-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v2-client0] >> TExportToS3Tests::AuditCancelledExport >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-True-client0] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.parquet-parquet] [GOOD] >> test.py::test[aggregate-avg_with_having-default.txt] [GOOD] >> test.py::test[aggregate-group_by_hop_zero_delay-] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.csv-csv_with_names] >> test.py::test[aggregate-group_by_hop_zero_delay-] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_key_check-] >> ReadUpdateWrite::Load [GOOD] >> KqpTpch::Query09 [GOOD] >> KqpTpch::Query10 >> test.py::test[aggregate-group_by_expr_with_join-] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum-] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client11-year Utf8 NOT NULL-True] [GOOD] >> test_s3_0.py::TestS3::test_bad_format[v2-false-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client12-year Date-False] >> TExportToS3Tests::AuditCancelledExport [GOOD] >> test_s3_0.py::TestS3::test_bad_format[v2-true-client0] >> TExportToS3Tests::AutoDropping >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-parquet] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_list] >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_BASIC-client0] [GOOD] >> KqpTpch::Query10 [GOOD] >> KqpTpch::Query11 >> test.py::test[blocks-combine_all_minmax_nested-] [GOOD] >> test.py::test[blocks-compare_dates_floats_bools-] [SKIPPED] >> test.py::test[blocks-date_less_or_equal_scalar-] >> TExportToS3Tests::AutoDropping [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client8-year Int32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client9-year Uint32-False] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg-] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-True-client0] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt] >> recipe::import_test [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-False-client0] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.tsv-tsv_with_names] >> test.py::test[aggregate-group_by_expr_alias_on_subexp-] [GOOD] >> test.py::test[aggregate-group_by_expr_order_by_expr-] >> test.py::test[aggregate-group_by_rollup_key_check-] [GOOD] >> test.py::test[aggregate-group_by_session-] |97.2%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/tools/mdb_mock/import_test >> recipe::import_test [GOOD] |97.2%| [TS] {RESULT} ydb/tests/tools/mdb_mock/import_test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/high_load/unittest >> ReadUpdateWrite::Load [GOOD] Test command err: Step 1. only write Was written: 0 MiB, Speed: 0 MiB/s Write: 10% 0.031192s 30% 0.031192s 50% 0.031192s 90% 0.031192s 99% 0.031192s Write: 10% 0.035730s 30% 0.035730s 50% 0.035730s 90% 0.035730s 99% 0.035730s Write: 10% 0.034058s 30% 0.034058s 50% 0.034058s 90% 0.034058s 99% 0.034058s Write: 10% 0.039659s 30% 0.039659s 50% 0.039659s 90% 0.039659s 99% 0.039659s Write: 10% 0.036190s 30% 0.036190s 50% 0.036190s 90% 0.036190s 99% 0.036190s Write: 10% 0.043247s 30% 0.043247s 50% 0.043247s 90% 0.043247s 99% 0.043247s Write: 10% 0.040756s 30% 0.040756s 50% 0.040756s 90% 0.040756s 99% 0.040756s Write: 10% 0.049150s 30% 0.049150s 50% 0.049150s 90% 0.049150s 99% 0.049150s Write: 10% 0.043847s 30% 0.043847s 50% 0.043847s 90% 0.043847s 99% 0.043847s Write: 10% 0.041769s 30% 0.041769s 50% 0.041769s 90% 0.041769s 99% 0.041769s Write: 10% 0.042207s 30% 0.042207s 50% 0.042207s 90% 0.042207s 99% 0.042207s Write: 10% 0.047031s 30% 0.047031s 50% 0.047031s 90% 0.047031s 99% 0.047031s Write: 10% 0.049144s 30% 0.049144s 50% 0.049144s 90% 0.049144s 99% 0.049144s Write: 10% 0.057119s 30% 0.057119s 50% 0.057119s 90% 0.057119s 99% 0.057119s Write: 10% 0.047429s 30% 0.047429s 50% 0.047429s 90% 0.047429s 99% 0.047429s Write: 10% 0.051594s 30% 0.051594s 50% 0.051594s 90% 0.051594s 99% 0.051594s Write: 10% 0.058787s 30% 0.058787s 50% 0.058787s 90% 0.058787s 99% 0.058787s Write: 10% 0.064739s 30% 0.064739s 50% 0.064739s 90% 0.064739s 99% 0.064739s Write: 10% 0.066915s 30% 0.066915s 50% 0.066915s 90% 0.066915s 99% 0.066915s Write: 10% 0.052804s 30% 0.052804s 50% 0.052804s 90% 0.052804s 99% 0.052804s Write: 10% 0.066845s 30% 0.066845s 50% 0.066845s 90% 0.066845s 99% 0.066845s Write: 10% 0.069944s 30% 0.069944s 50% 0.069944s 90% 0.069944s 99% 0.069944s Write: 10% 0.066983s 30% 0.066983s 50% 0.066983s 90% 0.066983s 99% 0.066983s Write: 10% 0.053388s 30% 0.053388s 50% 0.053388s 90% 0.053388s 99% 0.053388s Write: 10% 0.070125s 30% 0.070125s 50% 0.070125s 90% 0.070125s 99% 0.070125s Write: 10% 0.069797s 30% 0.069797s 50% 0.069797s 90% 0.069797s 99% 0.069797s Write: 10% 0.053360s 30% 0.053360s 50% 0.053360s 90% 0.053360s 99% 0.053360s Write: 10% 0.053290s 30% 0.053290s 50% Write: 10% 0.051242s 30% 0.051242s 50% 0.051242s 90% Write: 10% Write: 10% 0.057897s 30% 0.057897s 50% 0.057897s 90% 0.057897s 99% 0.057897s 0.053290s 90% Write: 10% 0.055270s 30% 0.055270s 50% 0.055270s 90% 0.055270s 99% 0.055270s 0.048991s 30% 0.048991s 50% 0.048991s 90% 0.048991s 99% 0.048991s 0.051242s 99% 0.051242s 0.053290s 99% 0.053290s Write: 10% 0.071463s 30% 0.071463s 50% 0.071463s 90% 0.071463s 99% 0.071463s Write: 10% 0.054407s 30% 0.054407s 50% 0.054407s 90% 0.054407s 99% 0.054407s Write: 10% 0.053865s 30% 0.053865s 50% 0.053865s 90% 0.053865s 99% 0.053865s Write: 10% 0.047922s 30% 0.047922s 50% 0.047922sWrite: 10% 0.061012s 30% 0.061012s 50% 0.061012s 90% 0.061012s 99% 0.061012s Write: 10% 0.048635s 30% 0.048635s 50% 0.048635s 90% 0.048635s 99% 0.048635s 90% 0.047922s 99% 0.047922s Write: 10% 0.055655s 30% 0.055655s 50% 0.055655s 90% 0.055655s 99% 0.055655s Write: 10% 0.060486s 30% 0.060486s 50% 0.060486s 90% 0.060486s 99% 0.060486s Write: 10% 0.068629s 30% 0.068629s 50% 0.068629s 90% 0.068629s 99% 0.068629s Write: 10% 0.056305s 30% 0.056305s 50% 
0.056305s 90% 0.056305s 99% 0.056305s Write: 10% 0.047999s 30% 0.047999s 50% 0.047999s 90% 0.047999s 99% 0.047999s Write: 10% 0.044810s 30% 0.044810s 50% Write: 10% 0.063744s 30% 0.063744s 50% 0.063744s 90% 0.063744s 99% 0.063744s 0.044810s 90% 0.044810s 99% 0.044810s Write: 10% 0.044062s 30% 0.044062s 50% 0.044062s 90% 0.044062s 99% 0.044062s Write: 10% 0.043318s 30% 0.043318s 50% 0.043318s 90% 0.043318s 99% 0.043318s Write: 10% 0.047332s 30% 0.047332s 50% 0.047332s 90% 0.047332s 99% 0.047332sWriteWrite: 10% 0.044954s 30% 0.044954s 50% 0.044954s 90% 0.044954s 99% 0.044954s : 10% 0.035674s 30% 0.035674s 50% 0.035674s 90% 0.035674s 99% 0.035674s Write: 10% 0.038663s 30% 0.038663s 50% 0.038663s 90% 0.038663s 99% 0.038663s Write: 10% 0.040340s 30% 0.040340s 50% 0.040340s 90% 0.040340s 99% 0.040340s Write: 10% 0.042710s 30% 0.042710s 50% 0.042710s 90% 0.042710s 99% 0.042710s WriteWrite: 10% 0.039942s 30% 0.039942s 50% 0.039942s 90% : 10% 0.042383s 30% 0.042383s 50% 0.042383s 90% 0.042383s 99% 0.042383s 0.039942s 99% 0.039942s Write: 10% 0.046263s 30% 0.046263s 50% 0.046263s 90% 0.046263s 99% 0.046263s Write: 10% 0.038752s 30% 0.038752s 50% 0.038752s 90% 0.038752s 99% 0.038752s Write: 10% 0.038924s 30% 0.038924s 50% 0.038924s 90% 0.038924s 99% 0.038924s Write: 10% 0.043944s 30% 0.043944s 50% 0.043944s 90% 0.043944s 99% 0.043944s Write: 10% 0.038912s 30% 0.038912s 50% 0.038912s 90% 0.038912s 99% 0.038912s Write: 10% 0.038366s 30% 0.038366s 50% 0.038366s 90% 0.038366s 99% 0.038366s Write: 10% 0.050272s 30% 0.050272s 50% 0.050272s 90% 0.050272s 99% 0.050272s Write: 10% 0.038897s 30% 0.038897s 50% 0.038897s 90% 0.038897s 99% 0.038897s Write: 10% 0.049865s 30% 0.049865s 50% 0.049865s 90% 0.049865s 99% 0.049865s Step 2. read write Write: 10% 0.010999s 30% 0.010999s 50% 0.010999s 90% 0.010999s 99% 0.010999s Write: 10% 0.011345s 30% 0.011345s 50% 0.011345s 90% 0.011345s 99% 0.011345s Write: 10% 0.011104s 30% 0.011104s 50% 0.011104s 90% 0.011104s 99% 0.011104s Write: 10% 0.026936s 30% 0.026936s 50% 0.026936s 90% 0.026936s 99% 0.026936s Write: 10% 0.031507s 30% 0.031507s 50% 0.031507s 90% 0.031507s 99% Write: 10% 0.031716s 30% 0.031716s 50% 0.031716s 90% 0.031716s 99% 0.031716s 0.031507s Write: 10% 0.032968s 30% 0.032968s 50% 0.032968s 90% 0.032968s 99% 0.032968s Write: 10% 0.032572s 30% 0.032572s 50% 0.032572s 90% 0.032572s 99% 0.032572s Write: 10% 0.031777s 30% 0.031777s 50% 0.031777s 90% 0.031777s 99% 0.031777s Write: 10% 0.033212s 30% 0.033212s 50% 0.033212s 90% 0.033212s 99% 0.033212s Write: 10% 0.031368s 30% 0.031368s 50% 0.031368s 90% 0.031368s 99% 0.031368s Write: 10% 0.034607s 30% 0.034607s 50% 0.034607s 90% 0.034607s 99% 0.034607s Write: 10% 0.032576s 30% 0.032576s 50% 0.032576s 90% 0.032576s 99% 0.032576s Write: 10% 0.034422sWrite: 10% 0.034530s 30% 0.034530s 50% 0.034530s 90% 0.034530s 99% 0.034530s 30% 0.034422s 50% 0.034422s 90% 0.034422s 99% 0.034422s Write: 10% 0.036688s 30% 0.036688s 50% 0.036688s 90% 0.036688s 99% 0.036688s Write: 10% 0.036149sWrite: 10% 0.037474s 30% 0.037474s 50% 0.037474s 90% 0.037474s 99% 0.037474s 30% 0.036149s 50% 0.036149s 90% 0.036149s 99% 0.036149s Write: 10% 0.036632s 30% 0.036632s 50% 0.036632s 90% 0.036632s 99% 0.036632s Write: 10% 0.034014s 30% 0.034014s 50% 0.034014s 90% 0.034014s 99% 0.034014s Write: 10% 0.039834s 30% 0.039834s 50% 0.039834s 90% 0.039834s 99% 0.039834s Write: 10% 0.041451s 30% 0.041451s 50% 0.041451s 90% 0.041451s 99% 0.041451s Write: 10% 0.038882s 30% 0.038882s 50% 0.038882s 90% 0.038882s 99% 0.038882s Write: 10% Write: 10% 0.039384s 30% 
0.039384s 50% 0.039384s 90% 0.039384s 99% 0.039384s Write: 10% 0.040635s 30% 0.040635s 50% 0.040635s 90% 0.040635s 99% 0.040635s Write: 10% 0.042116s 30% 0.042116s 50% 0.042116s 90% 0.042116s 99% 0.042116s Write: 10% 0.041518s 30% 0.041518s 50% 0.041518s 90% 0.041518s 99% 0.041518s Write0.042270s: 10% Write: 10% 0.041709s0.039527s 30% 0.042270s 50% 0.042270s 90% 0.042270s 30% 99% Write: 10% 0.035355s 30% 0.042270s 30% 0.041709s 50% 0.039527s Write 50% : 10% 0.039550s 30% 0.039527s0.035355s 90% 0.039527s 99% 50% 0.035355s 90% 0.039527s 0.039550s0.035355s0.041709s 90% 0.041709s 99% 0.041709s 99% 50% 0.039550s 90% 0.039550s0.035355s Write: 10% 0.035445s 30% 99% 0.039550s 0.035445s 50% 0.035445s 90% 0.035445s 99% 0.035445s Write: 10% 0.043001s 30% 0.043001s 50% 0.043001s 90% 0.043001s 99% 0.043001s Write: 10% 0.043885s 30% 0.043885s 50% 0.043885s 90% 0.043885s 99% 0.043885s Write: 10% 0.036080s 30% 0.036080s 50% 0.036080s 90% 0.036080s 99% 0.036080s Write: 10% 0.045462s 30% 0.045462s 50% 0.045462s 90% 0.045462s 99% 0.045462s Write: 10% 0.036218s 30% 0.036218s 50% 0.036218s 90% 0.036218s 99% 0.036218s WriteWrite: 10% 0.030275s 30% 0.030275s 50% 0.030275s: 10% Write: 10% 0.035876s 30% 0.035876s 50% 0.043942s 90% 0.030275s 99% 0.030275s 30% 0.043942s 50% 0.043942s 90% 0.043942s 99% 0.043942s0.035876s 90% 0.035876s 99% 0.035876s Write: 10% 0.024266s 30% 0.024266s 50% 0.024266s 90% 0.024266s 99% 0.024266s Write: 10% 0.026397s 30% 0.026397s 50% 0.026397s 90% 0.026397s 99% 0.026397s Write: 10% 0.028552s 30% 0.028552s 50% 0.028552s 90% 0.028552s 99% 0.028552s Write: 10% 0.028206s 30% 0.028206s 50% 0.028206s 90% 0.028206s 99% 0.028206s Write: 10% 0.027495s 30% 0.027495s 50% 0.027495s 90% 0.027495s 99% 0.027495s Write: 10% 0.031073s 30% 0.031073s 50% 0.031073s 90% 0.031073s 99% 0.031073s Write: 10% 0.028635s 30% 0.028635s 50% 0.028635s 90% 0.028635s 99% 0.028635sWrite: 10% 0.030721s 30% 0.030721s 50% 0.030721s 90% 0.030721s 99% 0.030721s Write: 10% 0.032373s 30% 0.032373s 50% 0.032373s 90% 0.032373s 99% 0.032373s Write: 10% 0.027966s 30% 0.027966s 50% 0.027966s 90% 0.027966s 99% 0.027966s WriteWrite: 10% 0.029467s 30% 0.029467s 50% 0.029467s 90% 0.029467s 99% 0.029467s : 10% Write: 10% 0.030201s 30% 0.030201s 50% 0.030201s 90% 0.030201s 99% 0.030201s 0.030116s 30% 0.030116s 50% 0.030116s 90% 0.030116s 99% 0.030116s Write: 10% 0.027908s 30% Write: 10% 0.028414s 30% 0.027908sWrite: 10% 0.028414s0.030253s 30% 0.030253s 50% 50% 50% 0.027908s 90% 0.028414s0.027908s0.030253s 90% 99% 0.027908s 0.030253s 99% 0.030253s 90% 0.028414s 99% 0.028414s Write: 10% 0.029999s 30% 0.029999s 50% 0.029999s 90% 0.029999s 99% Write: 10% 0.028215s 30% 0.028215s 50% 0.028215s 90% 0.028215s 99% 0.028215s 0.029999s Write: 10% 0.027834s 30% 0.027834s 50% 0.027834s 90% 0.027834s 99% 0.027834s Write: 10% 0.027669s 30% 0.027669s 50% 0.027669s 90% 0.027669s 99% 0.027669s Write: 10% 0.028264s 30% 0.028264s 50% 0.028264s 90% 0.028264s 99% 0.028264s Write: 10% 0.022130s 30% 0.022130s 50% 0.022130s 90% 0.022130s 99% 0.022130s Write: 10% 0.034192s 30% 0.034192s 50% 0.034192s 90% 0.034192s 99% 0.034192s Read: 10% 0.203022s 30% 0.203022s 50% 0.203022s 90% 0.203022s 99% 0.203022s Step 3. 
write modify Write: 10% 0.035230s 30% 0.035230s 50% 0.035230s 90% 0.035230s 99% 0.035230s Write: 10% 0.039465s 30% 0.039465s 50% 0.039465s 90% 0.039465s 99% 0.039465s Write: 10% 0.040166s 30% 0.040166s 50% 0.040166s 90% 0.040166s 99% 0.040166s Write: 10% 0.040363s 30% 0.040363s 50% 0.040363s 90% 0.040363s 99% 0.040363s Write: 10% 0.039488s 30% 0.039488s 50% 0.039488s 90% 0.039488s 99% 0.039488s Write: 10% Write: 10% 0.045508s 30% 0.045508s 50% 0.045508s 90% 0.045508s 99% 0.045508s0.045363s 30% 0.045363s 50% 0.045363s 90% 0.045363s 99% 0.045363s Write: 10% 0.051333s 30% 0.051333s 50% 0.051333s 90% 0.051333s 99% 0.051333s Write: 10% 0.053750s 30% 0.053750s 50% 0.053750s 90% 0.053750s 99% 0.053750s Write: 10% 0.058477s 30% 0.058477s 50% 0.058477s 90% 0.058477s 99% 0.058477s Write: 10% 0.060861s 30% 0.060861s 50% 0.060861s 90% 0.060861s 99% 0.060861s Write: 10% 0.063277s 30% 0.063277s 50% 0.063277s 90% 0.063277s 99% 0.063277s Write: 10% 0.064247s 30% 0.064247s 50% 0.064247s 90% 0.064247s 99% 0.064247s Write: 10% 0.063663s 30% 0.063663s 50% 0.063663s 90% 0.063663s 99% 0.063663s Write: 10% Write: 10% 0.069696s 30% 0.069696s0.071620s 30% 0.071620s 50% 0.071620s 90% 0.071620s 99% 0.071620s 50% 0.069696s 90% 0.069696s 99% 0.069696s Write: 10% 0.081759s 30% 0.081759s 50% 0.081759s 90% 0.081759s 99% 0.081759s Write: 10% Write: 10% 0.092381s0.088870sWrite: 10% 0.092593s 30% 30% 30% 0.088870s 50% 0.092381s 50% 0.092381s 90% 0.092381s 99% 0.092381s0.088870s 90% 0.088870s 99% 0.088870s 0.092593s 50% 0.092593s 90% 0.092593s 99% 0.092593s Write: 10% 0.088647s 30% 0.088647s 50% 0.088647s 90% 0.088647s 99% 0.088647s Write: 10% 0.093096s 30% 0.093096s 50% 0.093096s 90% 0.093096s 99% 0.093096s Write: 10% 0.094226s 30% 0.094226s 50% 0.094226s 90% 0.094226s 99% 0.094226s Write: 10% 0.093533s 30% 0.093533s 50% 0.093533s 90% 0.093533s 99% 0.093533s Write: 10% 0.092052s 30% 0.092052s 50% 0.092052s 90% 0.092052s 99% 0.092052s Write: 10% 0.091246s 30% 0.091246s 50% 0.091246s 90% 0.091246s 99% 0.091246s Write: 10% 0.095071s 30% 0.095071s 50% 0.095071s 90% 0.095071s 99% 0.095071s Write: 10% 0.091981s 30% 0.091981s 50% 0.091981s 90% 0.091981s 99% 0.091981s Write: 10% 0.095629s 30% 0.095629s 50% 0.095629s 90% 0.095629s 99% 0.095629s Write: 10% Write: 10% 0.090166s 30% Write0.092888s 30% 0.092888s 50% : 10% 0.090166s 50% 0.092888s0.090166s0.091344s 90% 0.092888s 99% 30% 90% 0.090166s 99% 0.090166s 0.092888s 0.091344s 50% 0.091344s 90% 0.091344s 99% 0.091344s Write: 10% 0.086328s 30% 0.086328s 50% 0.086328s 90% Write: 10% 0.093971s0.086328s 99% 0.086328s 30% 0.093971s 50% 0.093971s 90% 0.093971s 99% 0.093971s Write: 10% 0.091138s 30% 0.091138s 50% 0.091138s 90% 0.091138s 99% 0.091138s Write: 10% 0.091990s 30% 0.091990s 50% 0.091990s 90% 0.091990s 99% 0.091990s Write: 10% 0.094522s 30% 0.094522s 50% 0.094522s 90% 0.094522s 99% 0.094522s Write: 10% 0.095136s 30% 0.095136s 50% 0.095136s 90% 0.095136s 99% 0.095136s Write: 10% 0.091901s 30% 0.091901s 50% 0.091901s 90% 0.091901s 99% 0.091901s Write: 10% 0.091287s 30% 0.091287s 50% 0.091287s 90% Write: 10% Write0.091287s: 10% 99% 0.094861s0.091287s0.090823s 30% 0.090823s 50% 0.090823s 90% 0.090823s 99% 0.090823s WriteWrite: 10% : 10% 0.087723s 30% 0.087723s 50% Write: 10% 0.086610s 30% 0.086610s 50% 0.086610s 90% 0.087723s0.090786s 30% 0.090786s 50% 90% 0.087723s 99% 0.087723s 0.086610s 99% 0.086610s Write: 10% 0.082164s 30% 0.082164s 50% 0.082164s 90% 0.082164s 99% 0.082164s Write: 10% 0.095498s 30% 0.095498s 50% 0.095498s 90% 0.095498s 99% 0.095498s 0.090786s 90% 0.090786s 99% 
Write: 10% 0.086576s 30% 0.086576s 50% 0.086576s 90% 0.090786sWrite: 10% 0.086576sWrite: 10% 0.086597s 30% 0.086597s 50% 0.086597s 90% 0.086597s0.088407sWrite 30% 0.088407s 99% Write0.086597s : 10% 0.088527s 30% 0.088527s 50% 0.088527s 90% 0.088527s 99% 0.088527s Write: 10% 0.088144s 30% 0.088144s 50% 0.088144s 90% 0.088144s 99% 0.088144s Write: 10% 0.086640s 30% 0.086640s 50% 0.086640s 90% 0.086640s 99% 0.086640s Write: 10% 0.083298s 30% 0.083298s 50% 0.083298s 90% 0.083298s 99% 0.083298s Write: 10% WriteWrite0.085182s 30% 0.085182s 50% : 10% WriteWrite: 10% 0.085182s0.075303s: 10% 30% 0.075303s 50% 0.075303s 90% 0.075303s 99% 0.075303s 0.086814s 30% 0.086814s 50% 0.086814s 90% 0.086814s 99% 0.086814s : 10% 0.083290s 30% 0.083290s 50% 0.083290s 90% 0.083290s 99% 0.083290s 90% 0.085182s 99% 0.085182s 50% 0.088407s 90% 0.088407s 99% 0.088407s Write: 10% 0.091999s 30% 0.091999s 50% 0.091999s 90% 0.091999s 99% 0.091999s : 10% 0.084628s 30% 0.084628s 50% 0.084628s 90% 0.084628s 99% 0.084628s 30% 0.094861s 50% 0.094861s 90% 0.094861s 99% 0.094861s 99% 0.086576s Write: 10% 0.084492s 30% 0.084492s 50% 0.084492s 90% 0.084492s 99% 0.084492s 0.096230s 30% 0.096230s 50% 0.096230s 90% 0.096230s 99% 0.096230s Write: 10% 0.093532s 30% 0.093532s 50% 0.093532s 90% 0.093532s 99% 0.093532s Write: 10% 0.090046s 30% 0.090046s 50% 0.090046s 90% 0.090046s 99% 0.090046s Update: 10% 0.088061s 30% 0.088061s 50% 0.088061s 90% 0.088061s 99% 0.088061s Step 4. read modify write Write: 10% 0.045599s 30% 0.045599s 50% 0.045599s 90% 0.045599s 99% 0.045599s Write: 10% 0.045740s 30% 0.045740s 50% 0.045740s 90% 0.045740s 99% 0.045740s Write: 10% 0.065851s 30% 0.065851s 50% 0.065851s 90% 0.065851s 99% 0.065851s Write: 10% 0.070191s 30% 0.070191s 50% 0.070191s 90% 0.070191s 99% 0.070191s Write: 10% 0.077956s 30% 0.077956s 50% 0.077956s 90% 0.077956s 99% 0.077956s Write: 10% 0.085419s 30% 0.085419s 50% 0.085419s 90% 0.085419s 99% 0.085419s Write: 10% 0.092667s 30% 0.092667s 50% 0.092667s 90% 0.092667s 99% 0.092667s Write: 10% 0.096894s 30% 0.096894s 50% 0.096894s 90% 0.096894s 99% 0.096894s Write: 10% 0.102485s 30% 0.102485s 50% 0.102485s 90% 0.102485s 99% 0.102485s Write: 10% 0.107937s 30% 0.107937s 50% 0.107937s 90% Write: 10% 0.104667s 30% 0.104667s 50% 0.104667s0.107937s 99% 0.107937s 90% 0.104667s 99% 0.104667s Write: 10% 0.111331s 30% 0.111331s 50% 0.111331s 90% 0.111331s 99% 0.111331s Write: 10% 0.112014s 30% 0.112014s 50% 0.112014s 90% 0.112014sWrite: 10% 0.110341s 30% 0.110341s 99% 0.112014s 50% 0.110341s 90% 0.110341s 99% 0.110341s Write: 10% 0.111879s 30% 0.111879s 50% 0.111879s 90% 0.111879s 99% 0.111879s Write: 10% 0.116950s 30% 0.116950s 50% 0.116950s 90% 0.116950s 99% 0.116950s Write: 10% 0.114943s 30% 0.114943s 50% 0.114943s 90% 0.114943s 99% 0.114943s Write: 10% 0.110707s 30% 0.110707s 50% 0.110707s 90% 0.110707s 99% 0.110707s Write: 10% 0.081405s 30% 0.081405s 50% 0.081405s 90% 0.081405s 99% 0.081405s Write: 10% 0.084621s 30% 0.084621s 50% 0.084621s 90% 0.084621s 99% 0.084621s Write: 10% 0.088602s 30% 0.088602s 50% 0.088602s 90% 0.088602s 99% 0.088602s Write: 10% 0.117767s 30% 0.117767s 50% 0.117767s 90% 0.117767s 99% 0.117767s Write: 10% 0.094716s 30% 0.094716s 50% 0.094716s 90% 0.094716s 99% 0.094716s Write: 10% 0.082444s 30% 0.082444s 50% 0.082444s 90% 0.082444s 99% 0.082444s Write: 10% 0.097360s 30% 0.097360s 50% 0.097360s 90% 0.097360s 99% 0.097360s Write: 10% 0.107619s 30% 0.107619s 50% 0.107619s 90% 0.107619s 99% 0.107619s Write: 10% 0.081897s 30% 0.081897s 50% 0.081897s 90% 0.081897s 99% 0.081897s 
Write: 10% 0.066770s 30% 0.066770s 50% 0.066770s 90% 0.066770s 99% 0.066770s Write: 10% 0.067481s 30% 0.067481s 50% 0.067481s 90% 0.067481s 99% 0.067481s Write: 10% 0.122414s 30% 0.122414s 50% 0.122414s 90% 0.122414s 99% 0.122414s Write: 10% 0.088080s 30% 0.088080s 50% 0.088080s 90% 0.088080s 99% 0.088080s Write: 10% 0.064000s 30% 0.064000s 50% 0.064000s 90% 0.064000s 99% 0.064000s Write: 10% 0.122593s 30% 0.122593s 50% 0.122593s 90% 0.122593s 99% 0.122593s Write: 10% 0.093379s 30% 0.093379s 50% 0.093379s 90% 0.093379s 99% 0.093379s Write: 10% 0.121568s 30% 0.121568s 50% 0.121568s 90% 0.121568s 99% 0.121568s Write: 10% 0.106089s 30% 0.106089s 50% 0.106089s 90% 0.106089s 99% 0.106089s Write: 10% 0.177355s 30% 0.177355s 50% 0.177355s 90% 0.177355s 99% 0.177355s Write: 10% Write: 10% 0.135532s 30% 0.105339s 30% 0.135532s 50% 0.135532s 90% 0.135532s 99% 0.135532s 0.105339s 50% 0.105339s 90% 0.105339s 99% 0.105339s Write: 10% 0.115033s 30% 0.115033s 50% 0.115033s 90% 0.115033s 99% 0.115033s Write: 10% 0.139642s 30% 0.139642s 50% 0.139642s 90% 0.139642s 99% 0.139642s Write: 10% 0.133348s 30% 0.133348s 50% 0.133348s 90% 0.133348s 99% 0.133348s Write: 10% 0.133428s 30% 0.133428s 50% 0.133428s 90% 0.133428s 99% 0.133428s Write: 10% 0.160267s 30% 0.160267s 50% 0.160267s 90% 0.160267s 99% 0.160267s Write: 10% 0.123706s 30% 0.123706s 50% 0.123706s 90% 0.123706s 99% 0.123706s Write: 10% 0.180452s 30% 0.180452s 50% 0.180452s 90% 0.180452s 99% 0.180452s Write: 10% 0.112640s 30% 0.112640s 50% 0.112640s 90% 0.112640s 99% 0.112640s Write: 10% 0.117373s 30% 0.117373s 50% 0.117373s 90% 0.117373s 99% 0.117373s Write: 10% 0.127150s 30% 0.127150s 50% 0.127150s 90% 0.127150s 99% 0.127150s Write: 10% 0.129652s 30% 0.129652s 50% 0.129652s 90% 0.129652s 99% 0.129652s Write: 10% 0.143542s 30% 0.143542s 50% 0.143542s 90% 0.143542s 99% 0.143542s Write: 10% 0.142574s 30% 0.142574s 50% 0.142574s 90% 0.142574s 99% 0.142574s Write: 10% 0.160108s 30% 0.160108s 50% 0.160108s 90% 0.160108s 99% 0.160108s Write: 10% 0.144376s 30% 0.144376s 50% 0.144376s 90% 0.144376s 99% 0.144376s Write: 10% 0.149534s 30% 0.149534s 50% 0.149534s 90% 0.149534s 99% 0.149534s Write: 10% 0.178296s 30% 0.178296s 50% 0.178296s 90% 0.178296s 99% 0.178296s Write: 10% 0.148879s 30% 0.148879s 50% 0.148879s 90% 0.148879s 99% 0.148879s Write: 10% 0.142517s 30% 0.142517s 50% 0.142517s 90% 0.142517s 99% 0.142517s Write: 10% 0.212694s 30% 0.212694s 50% 0.212694s 90% 0.212694s 99% 0.212694s Write: 10% 0.149005s 30% 0.149005s 50% 0.149005s 90% 0.149005s 99% 0.149005s Write: 10% 0.149226s 30% 0.149226s 50% 0.149226s 90% 0.149226s 99% 0.149226s Write: 10% 0.149106s 30% 0.149106s 50% 0.149106s 90% 0.149106s 99% 0.149106s Write: 10% 0.142783s 30% 0.142783s 50% 0.142783s 90% 0.142783s 99% 0.142783s Write: 10% 0.162613s 30% 0.162613s 50% 0.162613s 90% 0.162613s 99% 0.162613s Update: 10% 0.043083s 30% 0.043083s 50% 0.160801s 90% 0.160801s 99% 0.160801s Read: 10% 0.567326s 30% 0.567326s 50% 0.567326s 90% 0.567326s 99% 0.567326s |97.2%| [TM] {RESULT} ydb/tests/olap/high_load/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AutoDropping [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:125:2058] recipient: [1:109:2141] 2025-05-05T03:23:04.103799Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:23:04.103831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:23:04.103837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:23:04.103842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:23:04.103852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:23:04.103855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:23:04.103864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:23:04.103878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:23:04.103982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:23:04.104095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:23:04.117442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:23:04.117470Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:23:04.120330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:23:04.120372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:23:04.120416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:23:04.123302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:23:04.123403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:23:04.123517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:04.123606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:23:04.124947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:04.125486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:04.125505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:04.125583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:23:04.125594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:04.125602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:23:04.125626Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:23:04.127162Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-05T03:23:04.147749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:23:04.147829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:04.147907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:23:04.147965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T03:23:04.147978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:04.148822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:04.148875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:23:04.148931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:04.148942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:23:04.148946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:23:04.148951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:23:04.149553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:04.149568Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:23:04.149574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:23:04.150070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:04.150085Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:04.150091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:04.150098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:23:04.150701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:23:04.151196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:23:04.151238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:23:04.151432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:04.151461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:23:04.151469Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:04.151534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:23:04.151543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:04.151571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:23:04.151584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:23:04.152180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:04.152190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:04.152228Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:04.152234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:23:04.152303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:04.152311Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:23:04.152323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:23:04.152327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:04.152332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:23:04.152335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:04.152339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:23:04.152344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:04.152349Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:23:04.152353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:23:04.152365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:23:04.152370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:23:04.152374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:23:04.152686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:23:04.152708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 44 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:37.917052Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:37.917055Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T03:23:37.917059Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-05T03:23:37.917063Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-05T03:23:37.917070Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T03:23:37.917075Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:127:2152] 2025-05-05T03:23:37.917782Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:37.917887Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T03:23:37.917907Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T03:23:37.917918Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T03:23:37.917926Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:23:37.917931Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T03:23:37.917936Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-05-05T03:23:37.920580Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:23:37.920613Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:23:37.920621Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 
102: satisfy waiter [5:476:2437] TestWaitNotification: OK eventTxId 102 2025-05-05T03:23:37.920846Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-05T03:23:37.920885Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 58us result status StatusSuccess 2025-05-05T03:23:37.921015Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 desc: 1 2025-05-05T03:23:37.921087Z node 5 :EXPORT DEBUG: TExport::TTxForget, dropping export tables, info: { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-05-05T03:23:37.921828Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:23:37.921841Z node 5 :EXPORT DEBUG: TExport::TTxProgress: Resume: id# 102 2025-05-05T03:23:37.921852Z node 5 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-05-05T03:23:37.921861Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:23:37.921880Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 2025-05-05T03:23:37.921885Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:23:37.921902Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 102 2025-05-05T03:23:37.921909Z node 5 :EXPORT 
INFO: TExport::TTxProgress: Drop propose: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2025-05-05T03:23:37.921924Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:23:37.922772Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-102" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:23:37.922806Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/export-102, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2025-05-05T03:23:37.922838Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710762:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37, at schemeshard: 72057594046678944 2025-05-05T03:23:37.923493Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 3 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:23:37.923575Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37, operation: DROP DIRECTORY, path: /MyRoot/export-102 2025-05-05T03:23:37.923607Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-05-05T03:23:37.923617Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 3 PathDropTxId: 281474976710761 2025-05-05T03:23:37.923626Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:23:37.923630Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-05-05T03:23:37.923637Z node 5 :EXPORT TRACE: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted 
(id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 3 PathDropTxId: 281474976710761 2025-05-05T03:23:37.923661Z node 5 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 2025-05-05T03:23:37.924150Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T03:23:37.924190Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T03:23:37.924212Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T03:23:37.924238Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T03:23:37.924243Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T03:23:37.924247Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T03:23:37.924252Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-05-05T03:23:37.924708Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 102 2025-05-05T03:23:37.924775Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-05T03:23:37.924782Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-05T03:23:37.924861Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-05T03:23:37.924875Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:23:37.924880Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:693:2650] TestWaitNotification: OK eventTxId 102 >> test.py::test[aggregate-group_by_rollup_grouping_hum-] [GOOD] >> test.py::test[aggregate-group_by_rollup_with_filter-] >> test_s3_0.py::TestS3::test_bad_format[v2-true-client0] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> KqpTpch::Query11 [GOOD] >> KqpTpch::Query12 >> test_s3_0.py::TestS3::test_bad_format[v1-false-client0] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval-pk_types35-all_types35-index35---] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client12-year Date-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client13-year Date NOT NULL-True] >> KqpTpch::Query12 [GOOD] >> KqpTpch::Query13 >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client9-year Uint32-False] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-False-client0] [GOOD] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params1] [GOOD] >> 
test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client10-year Int64 NOT NULL-True] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-True-client0] >> KqpTpch::Query13 [GOOD] >> KqpTpch::Query14 >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params2] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.json-json_each_row] >> test.py::test[aggregate-group_by_expr_order_by_expr-] [GOOD] >> test.py::test[aggregate-group_by_gs_grouping-] >> KqpTpch::Query14 [GOOD] >> KqpTpch::Query15 >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_FULL-client0] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_list] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_each_row] >> test.py::test[blocks-date_less_or_equal_scalar-] [GOOD] >> test.py::test[blocks-distinct_mixed_all-] >> test.py::test[aggregate-group_by_session-] [GOOD] >> ydb-tests-functional-encryption::import_test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client13-year Date NOT NULL-True] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-default] [SKIPPED] >> test.py::test[blocks-combine_all_avg_filter_opt-] >> tool::import_test [GOOD] >> test.py::test[aggregate-group_by_rollup_with_filter-] [GOOD] >> test.py::test[aggregate-having_distinct_expr-] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client14-year Datetime-False] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt] [GOOD] >> test.py::test[aggregate-compare_by_tuple-] >> KqpTpch::Query15 [GOOD] >> KqpTpch::Query16 |97.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/import_test >> ydb-tests-functional-encryption::import_test [GOOD] |97.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/tool/import_test >> tool::import_test [GOOD] |97.3%| [TS] {RESULT} ydb/tests/functional/encryption/import_test |97.3%| [TS] {RESULT} ydb/tests/stability/tool/import_test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client10-year Int64 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client11-year Int64-False] >> test_s3_0.py::TestS3::test_bad_format[v1-false-client0] [GOOD] >> test.py::test[action-action_eval_cluster_table_for-] [SKIPPED] >> test.py::test[action-eval_anon_table-] [SKIPPED] >> test.py::test[action-insert_after_eval_xlock-] [SKIPPED] >> test.py::test[aggr_factory-avg-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf-] >> test_s3_0.py::TestS3::test_bad_format[v1-true-client0] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> KqpTpch::Query16 [GOOD] >> KqpTpch::Query17 |97.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] >> test_ydb_backup.py::TestBackupRestoreInRoot::test_table_backup_restore_in_root >> test.py::test[aggregate-group_by_gs_grouping-] [GOOD] >> test.py::test[aggregate-group_by_hop_distinct_compact-] [SKIPPED] 
>> test.py::test[aggregate-group_by_hop_static_list_key-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_mul_gb_ru-] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-False-client0] >> ydbd_slice::import_test [GOOD] >> test.py::test[blocks-combine_all_avg_filter_opt-] [GOOD] >> test.py::test[blocks-combine_all_pg-] |97.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/ydbd_slice/bin/import_test >> ydbd_slice::import_test [GOOD] >> test_validation.py::TestS3::test_empty[v2-client0] [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] |97.3%| [TS] {RESULT} ydb/tools/ydbd_slice/bin/import_test >> test_validation.py::TestS3::test_nested_issues[v1-client0] >> ydb-tests-sql::import_test [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.json-json_each_row] [GOOD] >> KqpTpch::Query17 [GOOD] >> KqpTpch::Query18 >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.parquet-parquet] >> test.py::test[blocks-distinct_mixed_all-] [GOOD] >> test.py::test[blocks-minmax_tuple-] >> test.py::test[aggregate-having_distinct_expr-] [GOOD] >> test.py::test[aggregate-list_after_group-default.txt] |97.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/import_test >> ydb-tests-sql::import_test [GOOD] |97.3%| [TS] {RESULT} ydb/tests/sql/import_test >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v2-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v1-client0] >> KqpTpch::Query18 [GOOD] >> KqpTpch::Query19 >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client14-year Datetime-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client15-year Datetime NOT NULL-True] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T03:23:05.538792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T03:23:05.538820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:23:05.538826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T03:23:05.538831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T03:23:05.538841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T03:23:05.538845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T03:23:05.538853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 
0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T03:23:05.538868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T03:23:05.538969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T03:23:05.539040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T03:23:05.551716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T03:23:05.551740Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T03:23:05.568761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T03:23:05.570306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T03:23:05.570383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T03:23:05.579664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T03:23:05.579732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T03:23:05.579821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:05.580042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T03:23:05.581105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:05.581377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:05.581390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:05.581407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T03:23:05.581414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:05.581421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T03:23:05.581450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T03:23:05.583003Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T03:23:05.602432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T03:23:05.602513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:05.602608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T03:23:05.602663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , 
at schemeshard: 72057594046678944 2025-05-05T03:23:05.602675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:05.604244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:05.604283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T03:23:05.604342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:05.604354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T03:23:05.604360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T03:23:05.604366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T03:23:05.604894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:05.604908Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T03:23:05.604914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T03:23:05.605299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:05.605311Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:05.605317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:05.605325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T03:23:05.605961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T03:23:05.606431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T03:23:05.606478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T03:23:05.606674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:05.606703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T03:23:05.606710Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:05.606777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T03:23:05.606784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T03:23:05.606821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T03:23:05.606834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T03:23:05.607364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T03:23:05.607375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:05.607423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T03:23:05.607429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T03:23:05.607500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T03:23:05.607508Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T03:23:05.607523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:23:05.607528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:05.607533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T03:23:05.607536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:05.607541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T03:23:05.607545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T03:23:05.607549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T03:23:05.607553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T03:23:05.607565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T03:23:05.607572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T03:23:05.607576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T03:23:05.607900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T03:23:05.607921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-05-05T03:23:32.024000Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, RowCount 2, DataSize 70 2025-05-05T03:23:32.024048Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 2025-05-05T03:23:32.024064Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-05-05T03:23:32.024071Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0, RowCount 0, DataSize 0, with borrowed parts 2025-05-05T03:23:32.024079Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409547 2025-05-05T03:23:32.034520Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-05-05T03:23:35.561207Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0011 2025-05-05T03:23:35.582059Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0011 2025-05-05T03:23:35.612814Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-05-05T03:23:35.612923Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-05-05T03:23:35.612963Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, RowCount 2, DataSize 70 2025-05-05T03:23:35.613002Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 2025-05-05T03:23:35.613018Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-05-05T03:23:35.613027Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0, RowCount 0, DataSize 0, with borrowed parts 2025-05-05T03:23:35.613035Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409547 2025-05-05T03:23:35.623272Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-05-05T03:23:39.153915Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.001 2025-05-05T03:23:39.174791Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0009 2025-05-05T03:23:39.216101Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-05-05T03:23:39.216211Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-05-05T03:23:39.216243Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, RowCount 2, DataSize 70 2025-05-05T03:23:39.216287Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 2025-05-05T03:23:39.216303Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-05-05T03:23:39.216312Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0, RowCount 0, DataSize 0, with borrowed parts 2025-05-05T03:23:39.216321Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409547 2025-05-05T03:23:39.226607Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-05-05T03:23:42.696375Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [4:572:2530], attempt# 1 2025-05-05T03:23:42.700026Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvReset: self# [4:571:2529] 2025-05-05T03:23:42.701304Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [4:572:2530], sender# [4:571:2529] 2025-05-05T03:23:42.701323Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [4:571:2529] 2025-05-05T03:23:42.701348Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [4:572:2530], sender# [4:571:2529], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } 2025-05-05T03:23:42.701417Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [4:572:2530], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [6e3e0a41fdab8add833862f1bd2954c3,1d8dd09e584ce6a47582a31b591900e2,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:19189 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 883DB337-11B1-44F4-9770-354E3A6F4299 amz-sdk-request: attempt=1 content-length: 459 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-05-05T03:23:42.703563Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [4:572:2530], result# 2025-05-05T03:23:42.703642Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [4:571:2529], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 
1 Error: } 2025-05-05T03:23:42.706656Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 447 RawX2: 17179871600 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:23:42.706676Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-05-05T03:23:42.706701Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 447 RawX2: 17179871600 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:23:42.706716Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 447 RawX2: 17179871600 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T03:23:42.706731Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T03:23:42.706736Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:42.706741Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-05T03:23:42.706748Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-05-05T03:23:42.706794Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T03:23:42.707477Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:42.707584Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T03:23:42.707596Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-05-05T03:23:42.707609Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T03:23:42.707614Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T03:23:42.707620Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T03:23:42.707623Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T03:23:42.707629Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 
2025-05-05T03:23:42.707650Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:124:2150] message: TxId: 281474976710759 2025-05-05T03:23:42.707657Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T03:23:42.707662Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-05-05T03:23:42.707667Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-05-05T03:23:42.707692Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-05T03:23:42.708209Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-05-05T03:23:42.708225Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-05-05T03:23:42.708881Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T03:23:42.708896Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:592:2547] TestWaitNotification: OK eventTxId 102 >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test.py::test[aggregate-compare_by_tuple-] [GOOD] >> test.py::test[aggregate-group_by_hop-] [SKIPPED] >> test.py::test[aggregate-group_by_hop_bad_delay-] >> test.py::test[aggregate-group_by_hop_bad_delay-] [SKIPPED] >> test.py::test[aggregate-group_by_hop_star-] [SKIPPED] >> test.py::test[aggregate-group_by_session_distinct_compact-] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_each_row] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client11-year Int64-False] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-csv_with_names] >> test.py::test[aggregate-group_by_mul_gb_ru-] [GOOD] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client12-year Uint64-False] >> test_s3_0.py::TestS3::test_bad_format[v1-true-client0] [GOOD] >> test.py::test[action-eval_column-] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-False-client0] [GOOD] >> KqpTpch::Query19 [GOOD] >> KqpTpch::Query20 >> test_s3_0.py::TestS3::test_bad_request_on_invalid_parquet[v2-client0] >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-True-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00072a/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00072a/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/moto_server.err.log' 
mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1418515) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1420393 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |97.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} >> test_validation.py::TestS3::test_nested_issues[v1-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.parquet-parquet] [GOOD] |97.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_validation.py::TestS3::test_nested_issues[v2-client0] >> BulkUpsert::BulkUpsert [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf-] [GOOD] >> test.py::test[aggregate-group_by_expr-] >> KqpTpch::Query20 [GOOD] >> KqpTpch::Query21 >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client15-year Datetime NOT NULL-True] [GOOD] >> test.py::test[aggregate-list_after_group-default.txt] [GOOD] >> test.py::test[aggregate-list_nullable-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client0-column_type0-True] >> test_ydb_backup.py::TestBackupRestoreInRoot::test_table_backup_restore_in_root [GOOD] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-insert.test] [GOOD] >> test.py::test[blocks-minmax_tuple-] [GOOD] >> test.py::test[blocks-sort_two_mix-] >> test_simple.py::TestSimple::test_multi[alter_table] >> test.py::test[blocks-combine_all_pg-] [GOOD] >> test.py::test[blocks-combine_hashed_avg-] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client0-column_type0-True] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client1-column_type1-True] >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client12-year Uint64-False] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client13-year Date-False] >> test.py::test[aggregate-group_by_session_distinct_compact-] [GOOD] >> test.py::test[aggregate-having_cast-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client1-column_type1-True] [GOOD] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client2-column_type2-True] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-False-client0] >> test.py::test[aggregate-group_by_expr-] [GOOD] >> test.py::test[aggregate-group_by_rollup_aggr_expr-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client2-column_type2-True] [GOOD] >> test_s3_0.py::TestS3::test_bad_request_on_invalid_parquet[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client3-column_type3-False] >> test.py::test[aggregate-list_nullable-] [GOOD] >> test.py::test[bigdate-table_arithmetic-default.txt] >> test_s3_0.py::TestS3::test_bad_request_on_compression[v2-client0] >> 
test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client3-column_type3-False] [GOOD] >> KqpTpch::Query21 [GOOD] >> KqpTpch::Query22 >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client4-column_type4-True] |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> BulkUpsert::BulkUpsert [GOOD] |97.4%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest |97.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval-pk_types35-all_types35-index35---] [GOOD] >> test.py::test[action-eval_column-] [GOOD] >> test.py::test[action-eval_input_output_table-] [SKIPPED] >> test.py::test[action-unwrap_runtime_fail_with_column_message-] [SKIPPED] >> test.py::test[aggr_factory-max-default.txt] [SKIPPED] >> test.py::test[aggr_factory-some-default.txt] [SKIPPED] >> test.py::test[aggr_factory-variance-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt] >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_FULL-client0] [GOOD] >> test.py::test[blocks-combine_hashed_avg-] [GOOD] >> test.py::test[blocks-combine_hashed_sum_many_keys-] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client4-column_type4-True] [GOOD] >> test.py::test[aggregate-group_by_ru_with_select_distinct-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client5-column_type5-True] >> test_validation.py::TestS3::test_nested_issues[v2-client0] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-csv_with_names] [GOOD] >> test_validation.py::TestS3::test_nested_type[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client5-column_type5-True] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-parquet] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client6-column_type6-True] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client13-year Date-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client6-column_type6-True] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.json-json_each_row] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client0-year Int32 NOT NULL-True] >> test.py::test[blocks-sort_two_mix-] [GOOD] >> test.py::test[blocks-string_filter-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client7-column_type7-False] >> test.py::test[bigdate-table_arithmetic-default.txt] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_table] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test_simple.py::TestSimple::test[alter_table] >> test.py::test[binding-named_node_corr_names-default.txt] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> 
test_insert.py::TestS3::test_insert_without_format_error[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000762/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_insert/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000762/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_insert/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1400550) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000762/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_insert/testing_out_stuff/test_insert.py.TestS3.test_insert.v1-client0-json_list-dataset/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000762/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_insert/testing_out_stuff/test_insert.py.TestS3.test_insert.v1-client0-json_list-dataset/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1404089 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client7-column_type7-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client8-column_type8-False] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-False-client0] [GOOD] >> KqpTpch::Query22 [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-True-client0] >> test_s3_0.py::TestS3::test_bad_request_on_compression[v2-client0] [GOOD] >> test_s3_0.py::TestS3::test_checkpoints_on_join_s3_with_yds[v1-mvp_external_ydb_endpoint0-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client8-column_type8-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client9-column_type9-False] >> test.py::test[aggregate-having_cast-default.txt] [GOOD] >> test.py::test[blocks-add_int8-] >> test.py::test[aggregate-group_by_rollup_aggr_expr-] [GOOD] >> 
test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client9-column_type9-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client10-column_type10-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client0-year Int32 NOT NULL-True] [GOOD] >> test.py::test[blocks-string_filter-] [GOOD] >> test.py::test[column_group-groups-single] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client1-year Uint32 NOT NULL-True] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.json-json_each_row] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt] [GOOD] >> test.py::test[aggregate-compact_distinct-] [SKIPPED] >> test.py::test[aggregate-group_by_hop_static-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_mul_ru_ru-] >> KqpQuerySession::NoLocalAttach >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client10-column_type10-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client11-column_type11-False] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.parquet-parquet] >> test.py::test[binding-named_node_corr_names-default.txt] [GOOD] >> ydb-library-yaml_config-ut_transform::import_test [GOOD] >> test.py::test[blocks-bitcast_scalar-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client11-column_type11-False] [GOOD] >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_PROFILE-client0] >> KqpQueryService::ReplyPartLimitProxyNode >> test_simple.py::TestSimple::test[alter_table] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test[alter_tablestore] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[solomon-HistResponse-default.txt] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-False-client0] |97.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/import_test >> ydb-library-yaml_config-ut_transform::import_test [GOOD] |97.4%| [TS] {RESULT} ydb/library/yaml_config/ut_transform/import_test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client12-column_type12-False] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v1-client0] [GOOD] >> test.py::test[aggregate-group_by_ru_with_select_distinct-] [GOOD] >> test.py::test[aggregate-list_with_fold_map-] >> test.py::test[blocks-combine_hashed_sum_many_keys-] [GOOD] >> test.py::test[blocks-decimal_comparison-] [SKIPPED] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client12-column_type12-False] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v2-client0] >> test.py::test[action-combine_subqueries_with_table_param-default.txt] [SKIPPED] >> test.py::test[aggr_factory-bottom-default.txt] [SKIPPED] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt] >> 
test.py::test[blocks-interval_add_date_scalar-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client13-column_type13-False] >> ydb-tests-stress-log-tests::import_test [GOOD] >> KqpQueryService::ReplyPartLimitProxyNode [GOOD] >> NodeIdDescribe::HasDistribution >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client13-column_type13-False] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-parquet] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client0-column_type0-True] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_statistics.py::TestS3::test_precompute[v2-client0] |97.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/log/tests/import_test >> ydb-tests-stress-log-tests::import_test [GOOD] |97.4%| [TS] {RESULT} ydb/tests/stress/log/tests/import_test >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt] [GOOD] >> test.py::test[aggregate-group_by_session_extended-] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client1-year Uint32 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client2-year Uint64 NOT NULL-True] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client0-column_type0-True] [GOOD] >> test.py::test[column_group-groups-single] [GOOD] >> test.py::test[column_order-join_nosimple-] >> test_simple.py::TestSimple::test[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test_multi[table] [GOOD] >> test_simple.py::TestSimple::test[table] ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/core/kqp/tests/kikimr_tpch/unittest >> KqpTpch::Query22 [GOOD] Test command err: -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 5 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 20 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 28 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 37 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 5 |97.5%| [TM] {RESULT} ydb/core/kqp/tests/kikimr_tpch/unittest >> test.py::test[blocks-add_int8-] [GOOD] >> test.py::test[blocks-add_uint8-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client1-column_type1-True] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt] [GOOD] >> test.py::test[aggregate-ensure_count-default.txt] >> test.py::test[solomon-HistResponse-default.txt] [GOOD] 
>> test.py::test[solomon-InvalidProject-] >> test.py::test[aggregate-group_by_mul_ru_ru-] [GOOD] >> test.py::test[aggregate-group_by_rollup_duo-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client1-column_type1-True] [GOOD] >> ydb-tests-stress-simple_queue-tests::import_test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client2-column_type2-True] |97.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/import_test >> ydb-tests-stress-simple_queue-tests::import_test [GOOD] |97.5%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/import_test >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-True-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client2-year Uint64 NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client2-column_type2-True] [GOOD] >> test_simple.py::TestSimple::test[table] [GOOD] >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] >> test_simple.py::TestSimple::test[tablestores] >> test.py::test[blocks-interval_add_date_scalar-] [GOOD] >> test.py::test[blocks-tuple_type-] >> test.py::test[action-eval_filter-] [SKIPPED] >> test.py::test[aggregate-group_by_session_extended-] [GOOD] >> test.py::test[blocks-bitcast_scalar-] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v2-client0] [GOOD] >> test.py::test[aggregate-list_with_fold_map-] [GOOD] |97.5%| [TA] $(B)/ydb/tests/datashard/split_merge/test-results/py3test/{meta.json ... results_accumulator.log} >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client3-column_type3-False] >> KqpFederatedQuery::ExecuteScriptWithExternalTableResolve >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client3-year Date NOT NULL-False] >> test.py::test[aggregate-group_by_tz_date-] >> test.py::test[action-eval_for_over_subquery-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client3-column_type3-False] [GOOD] >> test.py::test[blocks-combine_all_max_filter-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client4-column_type4-True] >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v1-client0] >> test.py::test[ansi_idents-order_by-default.txt] >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] >> test.py::test[blocks-add_uint8-] [GOOD] >> test.py::test[blocks-block_input-aux_columns] [SKIPPED] >> test.py::test[blocks-combine_all_count_filter_opt-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client4-column_type4-True] [GOOD] >> test.py::test[column_order-join_nosimple-] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test.py::test[column_order-select_subquery-default.txt] [SKIPPED] >> test.py::test[distinct-distinct_columns_after_group-default.txt] >> test.py::test[aggregate-ensure_count-default.txt] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client5-column_type5-True] >> test.py::test[aggregate-group_by_tablerow_column-] 
>> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] >> test.py::Test::test_add [GOOD] >> test.py::Test::test_add_vars [GOOD] >> test.py::Test::test_create [GOOD] >> test.py::Test::test_expose_var_from_include [GOOD] >> test.py::Test::test_expose_var_from_var [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client5-column_type5-True] [GOOD] >> test.py::test[aggregate-group_by_rollup_duo-] [GOOD] >> test.py::Test::test_include [GOOD] >> KqpFederatedQuery::ExecuteScriptWithExternalTableResolve [GOOD] >> test.py::test[blocks-combine_all_max_filter-] [GOOD] >> test.py::test[blocks-tuple_type-] [GOOD] >> test_simple.py::TestSimple::test[tablestores] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client6-column_type6-True] >> test.py::test[aggregate-group_by_rollup_udf-] >> test.py::Test::test_include_from_resource [GOOD] >> KqpFederatedQuery::ExecuteQueryWithExternalTableResolve >> test.py::test[blocks-combine_all_sum-] >> test.py::Test::test_linked_include [GOOD] >> test.py::test[blocks-type_and_callable_stats-] [SKIPPED] >> test.py::test[column_group-hint-perusage] [SKIPPED] >> test.py::Test::test_result_formatter [GOOD] >> test.py::test[column_order-select_plain_nosimple-default.txt] >> test.py::Test::test_result_formatter_dates [GOOD] >> test.py::Test::test_result_formatter_optional [GOOD] >> test.py::Test::test_result_formatter_zeros [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-True-client0] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt] [GOOD] >> test.py::test[binding-anon_table_binding-default.txt] [SKIPPED] >> test.py::test[binding-table_regexp_strict_binding-] [SKIPPED] >> test.py::test[blocks-block_input_various_types_2-] [SKIPPED] >> test.py::test[blocks-compare-] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-False-client0] |97.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/template/ut/py3test >> test.py::Test::test_result_formatter_zeros [GOOD] >> test_statistics.py::TestS3::test_precompute[v2-client0] [GOOD] >> test_statistics.py::TestS3::test_precompute[v1-client0] |97.5%| [TS] {RESULT} ydb/library/benchmarks/template/ut/py3test >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] [SKIPPED] >> test.py::test[solomon-UnknownSetting-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client6-column_type6-True] [GOOD] >> test.py::test[aggregate-group_by_tz_date-] [GOOD] >> test.py::test[bigdate-table_yt_native-on] [SKIPPED] >> test.py::test[binding-table_concat_binding-default.txt] [SKIPPED] >> test.py::test[blocks-add_decimal-] [SKIPPED] >> test.py::test[blocks-add_uint32-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client7-column_type7-False] >> ydb-core-viewer-tests::import_test [GOOD] >> test.py::test[action-eval_for_over_subquery-default.txt] [GOOD] >> test.py::test[action-eval_values_output_table_subquery-] [SKIPPED] |97.5%| [TA] {RESULT} $(B)/ydb/tests/datashard/split_merge/test-results/py3test/{meta.json ... 
results_accumulator.log} >> KqpFederatedQuery::ExecuteQueryWithExternalTableResolve [GOOD] >> KqpFederatedQuery::ExecuteScriptWithS3ReadNotCached >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client3-year Date NOT NULL-False] [GOOD] >> test.py::test[action-mixed_eval_typeof_world1-] [SKIPPED] >> test.py::test[action-nested_rewrite_io-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client7-column_type7-False] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt] [GOOD] >> test.py::test[distinct-distinct_window-default.txt] |97.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/tests/import_test >> ydb-core-viewer-tests::import_test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/common/py3test >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1518069) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |97.5%| [TS] {RESULT} ydb/core/viewer/tests/import_test >> test.py::test[blocks-combine_all_count_filter_opt-] [GOOD] >> test.py::test[blocks-combine_all_decimal-] [SKIPPED] >> test.py::test[blocks-date_sub_interval_scalar-] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client4-year String NOT NULL-True] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client8-column_type8-False] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] [GOOD] |97.5%| [TM] {RESULT} ydb/tests/fq/common/py3test >> ydb-tests-functional-query_cache::import_test [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.json-json_each_row] >> test.py::test[aggregate-group_by_tablerow_column-] [GOOD] >> test.py::test[bigdate-table_yt_native-wo_compat] [SKIPPED] >> test.py::test[blocks-combine_hashed_count_filter-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client8-column_type8-False] [GOOD] >> test.py::test[aggregate-group_by_rollup_udf-] [GOOD] >> test.py::test[aggregate-percentiles_containers-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client9-column_type9-False] |97.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/import_test >> ydb-tests-functional-query_cache::import_test [GOOD] |97.6%| [TS] {RESULT} ydb/tests/functional/query_cache/import_test >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v1-client0] [GOOD] >> ydb-tests-library-ut::import_test [GOOD] >> test_ydb_backup.py::TestBackupRestoreInRootSchemeOnly::test_table_backup_restore_in_root_scheme_only >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v2-client0] >> test.py::test[blocks-compare-] [GOOD] >> test.py::test[blocks-date_add_interval_scalar-] >> 
test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client9-column_type9-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client10-column_type10-False] |97.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/import_test >> ydb-tests-library-ut::import_test [GOOD] |97.6%| [TS] {RESULT} ydb/tests/library/ut/import_test >> test.py::test[column_order-select_plain_nosimple-default.txt] [GOOD] >> test.py::test[distinct-distinct_by_tuple-default.txt] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-False-client0] [GOOD] >> KqpFederatedQuery::ExecuteScriptWithS3ReadNotCached [GOOD] >> KqpFederatedQuery::ExecuteScriptWithDataSource >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-True-client0] >> test.py::Test::test_add_calculated [GOOD] >> test.py::Test::test_add_duplicate [GOOD] >> test.py::Test::test_add_from_file [GOOD] >> test.py::Test::test_add_from_lines [GOOD] >> test.py::Test::test_add_one [GOOD] >> test.py::Test::test_add_one_error [GOOD] >> test.py::Test::test_add_one_with_empty [GOOD] >> test.py::Test::test_add_shame_rate [GOOD] >> test.py::Test::test_add_similar_errors [GOOD] >> test.py::Test::test_add_special [GOOD] >> test.py::Test::test_add_two_error [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client10-column_type10-False] [GOOD] >> test.py::Test::test_build [GOOD] >> test.py::Test::test_build_md [GOOD] >> test.py::Test::test_build_shame_md [GOOD] >> test.py::Test::test_build_shame_sum_md [GOOD] >> test.py::Test::test_build_sum_md [GOOD] >> test.py::Test::test_build_sums [GOOD] >> test.py::Test::test_build_sums_shame [GOOD] >> test.py::Test::test_create [GOOD] >> test.py::test[blocks-add_uint32-] [GOOD] >> test.py::test[blocks-add_uint64_opt-] >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client11-column_type11-False] >> test.py::Test::test_display [GOOD] >> test.py::Test::test_immutable_special [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v1-client0] >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_PROFILE-client0] [GOOD] >> test.py::test[blocks-combine_all_sum-] [GOOD] >> test.py::test[blocks-date_greater_scalar-] >> test.py::test[distinct-distinct_window-default.txt] [GOOD] >> test.py::test[expr-constraints_of-] >> test_validation.py::TestS3::test_nested_type[v1-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client4-year String NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client11-column_type11-False] [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/report/ut/py3test >> test.py::Test::test_immutable_special [GOOD] Test command err: ydb/library/benchmarks/report/__init__.py:187: DeprecationWarning: the 'MARKDOWN' constant is deprecated, use the 'TableStyle' enum instead self.t.set_style(prettytable.MARKDOWN) |97.6%| [TS] {RESULT} ydb/library/benchmarks/report/ut/py3test >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.json-json_each_row] [GOOD] >> test_validation.py::TestS3::test_nested_type[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client5-year String-False] >> 
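Note on the prettytable DeprecationWarning reported for ydb/library/benchmarks/report above: the message itself names the replacement, the TableStyle enum. A minimal sketch of that migration, assuming a prettytable release that ships TableStyle (the column names and row are made up for illustration and are not taken from the report code):

    from prettytable import PrettyTable, TableStyle

    t = PrettyTable(["query", "time_ms"])   # illustrative columns
    t.add_row(["q1", 42])
    t.set_style(TableStyle.MARKDOWN)        # replaces the deprecated prettytable.MARKDOWN constant
    print(t)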
ydb-tests-functional-scheme_tests::import_test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client12-column_type12-False] >> test.py::test[blocks-date_sub_interval_scalar-] [GOOD] >> test.py::test[blocks-exists-] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.parquet-parquet] >> test.py::test[solomon-UnknownSetting-] [GOOD] >> test.py::test[action-nested_rewrite_io-default.txt] [GOOD] >> test.py::test[aggr_factory-every-default.txt] [SKIPPED] >> test.py::test[aggr_factory-multi-] >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v1-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v2-client0] >> test.py::test[aggregate-percentiles_containers-] [GOOD] >> test.py::test[aggregate-table_row_aggregation-default.txt] >> KqpFederatedQuery::ExecuteScriptWithDataSource [GOOD] >> KqpFederatedQuery::ExecuteScriptWithDataSourceJoinYdb >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client12-column_type12-False] [GOOD] |97.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/import_test >> ydb-tests-functional-scheme_tests::import_test [GOOD] |97.6%| [TS] {RESULT} ydb/tests/functional/scheme_tests/import_test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client13-column_type13-False] >> test_statistics.py::TestS3::test_precompute[v1-client0] [GOOD] >> test_statistics.py::TestS3::test_sum[v2-client0] >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_NONE-client0] >> test.py::test[blocks-date_add_interval_scalar-] [GOOD] >> test.py::test[blocks-member-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client13-column_type13-False] [GOOD] >> test.py::test[action-eval_atom_wrong_type_expr-] >> test.py::test[distinct-distinct_by_tuple-default.txt] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client0-column_type0-False] >> test.py::test[action-eval_atom_wrong_type_expr-] [SKIPPED] >> test.py::test[action-eval_folder-] [SKIPPED] >> test.py::test[action-eval_folder_via_file-] [SKIPPED] >> test.py::test[action-eval_regexp-] [SKIPPED] >> test.py::test[distinct-distinct_count_only-default.txt] >> ydb-tests-functional-script_execution::import_test [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-True-client0] [GOOD] >> test.py::test[aggregate-compare_tuple-] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-False-client0] >> integrations_test.py::test_read_jtest_results[o/OK] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client0-column_type0-False] [GOOD] >> integrations_test.py::test_read_jtest_results[o/OK] [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed1] [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed2] [GOOD] >> integrations_test.py::test_read_jtest_results[f/error1] [GOOD] >> integrations_test.py::test_read_jtest_results[s/skipped1] [GOOD] >> integrations_test.py::test_read_jtest_results[s/skipped2] [GOOD] >> integrations_test.py::test_read_jtest_with_one_result [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client1-column_type1-False] |97.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/import_test >> 
ydb-tests-functional-script_execution::import_test [GOOD] |97.6%| [TS] {RESULT} ydb/tests/functional/script_execution/import_test >> test.py::test[blocks-combine_hashed_count_filter-] [GOOD] >> test.py::test[blocks-date_add_interval-] >> simple_queue::import_test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client1-column_type1-False] [GOOD] |97.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/solomon/py3test >> test.py::test[solomon-UnknownSetting-] [GOOD] |97.6%| [TM] {RESULT} ydb/tests/fq/solomon/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client2-column_type2-False] >> test.py::test[blocks-add_uint64_opt-] [GOOD] >> test.py::test[blocks-combine_all_sum_filter-] >> KqpFederatedQuery::ExecuteScriptWithDataSourceJoinYdb [GOOD] >> KqpFederatedQuery::ExecuteScriptWithExternalTableResolveCheckPragma >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v2-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.parquet-parquet] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v1-client0] >> test_ydb_backup.py::TestBackupRestoreInRootSchemeOnly::test_table_backup_restore_in_root_scheme_only [GOOD] |97.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/import_test >> simple_queue::import_test [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/py3test >> integrations_test.py::test_read_jtest_with_one_result [GOOD] Test command err: /home/runner/.ya/build/build_root/177e/000505/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:26: PytestCollectionWarning: cannot collect test class 'TestCase' because it has a __init__ constructor (from: integrations_test.py) /home/runner/.ya/build/build_root/177e/000505/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:20: PytestCollectionWarning: cannot collect test class 'TestState' because it has a __init__ constructor (from: integrations_test.py) |97.7%| [TS] {RESULT} ydb/tests/stress/simple_queue/import_test |97.7%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/py3test >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] >> test.py::test[action-action_eval_cluster_use_compact_named_exprs-] [SKIPPED] >> test.py::test[aggregate-aggregation_and_order-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client2-column_type2-False] [GOOD] >> test.py::test[aggregate-table_row_aggregation-default.txt] [GOOD] >> test.py::test[bigdate-tz_table_pull-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client3-column_type3-False] >> test.py::test[blocks-exists-] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client5-year String-False] [GOOD] >> test.py::test[blocks-interval_mul-] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client6-year Utf8 NOT NULL-True] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client3-column_type3-False] [GOOD] >> 
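Note on the PytestCollectionWarning reported for ydb/tests/postgres_integrations above: pytest tries to collect any class whose name starts with "Test" and skips it, with this warning, when the class defines __init__. A hedged sketch of the usual opt-out, using a stand-in class that only mirrors the name from the warning (the real TestState implementation is not shown here):

    class TestState:
        __test__ = False            # tells pytest this is a helper class, not a test class

        def __init__(self, name: str):
            self.name = name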
test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client4-column_type4-False] >> test.py::test[expr-constraints_of-] [GOOD] >> test.py::test[expr-empty_iterator2-] >> test.py::test[aggr_factory-multi-] [GOOD] >> test.py::test[aggregate-agg_phases_table1-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_key_column-default.txt] >> test.py::test[distinct-distinct_count_only-default.txt] [GOOD] >> test.py::test[dq-blacklisted_pragmas1-] [SKIPPED] >> test.py::test[dq-truncate_local-default.txt] [SKIPPED] >> test.py::test[epochs-reset_sortness_on_append-] [SKIPPED] >> test.py::test[flatten_by-flatten_with_subquery-default.txt] [SKIPPED] >> test.py::test[hor_join-out_max_outtables-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client4-column_type4-False] [GOOD] >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] >> test.py::test[aggregate-compare_tuple-] [GOOD] >> test.py::test[aggregate-group_by_expr_semi_join-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client5-column_type5-True] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-False-client0] [GOOD] >> KqpFederatedQuery::ExecuteScriptWithExternalTableResolveCheckPragma [GOOD] >> KqpFederatedQuery::ExecuteScriptWithDataSourceJoinYdbCheckPragma >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-True-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client5-column_type5-True] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client6-column_type6-False] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test.py::test[blocks-date_greater_scalar-] [GOOD] >> test.py::test[blocks-decimal_op_decimal-] [SKIPPED] >> test.py::test[blocks-decimal_unary-] [SKIPPED] >> test.py::test[blocks-distinct_pure_all-] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] |97.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/py3test >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] |97.7%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/py3test >> test.py::test[bigdate-tz_table_pull-] [GOOD] >> test.py::test[blocks-add_uint64_opt2-] >> test.py::test[aggregate-aggregation_and_order-default.txt] [GOOD] >> test.py::test[aggregate-group_by_expr_dict-] >> test.py::test[blocks-member-] [GOOD] >> test.py::test[column_group-hint_unk_col_fail-] [SKIPPED] >> test.py::test[column_order-select_action-default.txt] >> test.py::test[blocks-combine_all_sum_filter-] [GOOD] >> test.py::test[blocks-complex_scalars-] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client6-year Utf8 NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client6-column_type6-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client7-column_type7-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client7-year Utf8-False] >> NodeIdDescribe::HasDistribution [GOOD] >> 
test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client7-column_type7-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client8-column_type8-False] >> test.py::test[blocks-date_add_interval-] [GOOD] >> test.py::test[blocks-interval_add_interval_scalar-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client8-column_type8-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client9-column_type9-False] >> test.py::test[aggregate-group_by_expr_semi_join-] [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo-] >> test.py::test[blocks-interval_mul-] [GOOD] >> test.py::test[case-case_then_else-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client9-column_type9-False] [GOOD] >> KqpFederatedQuery::ExecuteScriptWithDataSourceJoinYdbCheckPragma [GOOD] >> KqpFederatedQuery::ExecuteScriptWithDataSourceAndTablePathPrefix >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client10-column_type10-False] >> test.py::test[action-empty_do-default.txt] >> test.py::test[aggregate-aggregate_key_column-default.txt] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client10-column_type10-False] [GOOD] >> test.py::test[hor_join-out_max_outtables-default.txt] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client11-column_type11-False] >> test.py::test[insert-append_sorted-to_sorted] [SKIPPED] >> test.py::test[insert-append_sorted-to_sorted_calc] [SKIPPED] >> test.py::test[insert-append_with_read_udf_fail-] [SKIPPED] >> test.py::test[insert-multiappend_sorted-default.txt] [SKIPPED] >> test.py::test[insert-override-] [SKIPPED] >> test.py::test[insert-yql-13083-] [SKIPPED] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test.py::test[join-alias_where_group-off] [SKIPPED] >> test.py::test[join-equi_join_three_simple-] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] >> test.py::test[blocks-add_uint64_opt2-] [GOOD] >> test.py::test[blocks-combine_all_avg-] >> test.py::test[expr-empty_iterator2-] [GOOD] >> test.py::test[flatten_by-flatten_expr_struct-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client11-column_type11-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client12-column_type12-False] >> test.py::test[blocks-distinct_pure_all-] [GOOD] >> test.py::test[blocks-not-] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-True-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client7-year Utf8-False] [GOOD] >> test_s3_1.py::TestS3::test_huge_source[v2-false-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client12-column_type12-False] [GOOD] >> test.py::test[blocks-complex_scalars-] [GOOD] >> test.py::test[blocks-date_group_by-] [SKIPPED] >> test.py::test[blocks-distinct_opt_state_keys-] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client8-year 
Int32-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client13-column_type13-False] >> KqpFederatedQuery::ExecuteScriptWithDataSourceAndTablePathPrefix [GOOD] >> KqpFederatedQuery::ExecuteScriptWithDifferentBindingsMode >> test.py::test[action-empty_do-default.txt] [GOOD] >> test.py::test[action-eval_folder_via_file_in_job-] [SKIPPED] >> test.py::test[action-eval_table_with_view-default.txt] [SKIPPED] >> test.py::test[action-runtime_if_select-default.txt] [SKIPPED] >> test.py::test[aggr_factory-list-] [SKIPPED] >> ydb-tests-functional-audit::import_test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client13-column_type13-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client0-column_type0-False] >> test.py::test[aggr_factory-mode-default.txt] [SKIPPED] >> test.py::test[aggregate-agg_phases_table2-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregation_with_named_node-] |97.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/kqp/kqp_query_svc/unittest >> NodeIdDescribe::HasDistribution [GOOD] |97.7%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_svc/unittest >> test.py::test[aggregate-group_by_expr_dict-] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt] >> test.py::test[aggregate-aggregate_with_lambda-] [GOOD] >> test.py::test[aggregate-error_type-] [SKIPPED] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt] |97.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/import_test >> ydb-tests-functional-audit::import_test [GOOD] |97.7%| [TS] {RESULT} ydb/tests/functional/audit/import_test >> test.py::test[blocks-interval_add_interval_scalar-] [GOOD] >> test.py::test[blocks-json_document_type-] [SKIPPED] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx-] >> test.py::test[aggregate-group_by_gs_alt_duo-] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client0-column_type0-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client1-column_type1-False] >> test.py::test[column_order-select_action-default.txt] [GOOD] >> test.py::test[column_order-union_all_positional_columns_count_fail-] [SKIPPED] >> test.py::test[count-count_all-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client1-column_type1-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client2-column_type2-False] >> test.py::test[blocks-not-] [GOOD] >> test.py::test[blocks-pg_to_interval-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client2-column_type2-False] [GOOD] >> test.py::test[case-case_then_else-default.txt] [GOOD] >> test.py::test[count-count_nullable-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client3-column_type3-False] >> test.py::test[join-equi_join_three_simple-] [GOOD] >> test.py::test[join-inner_all_right-] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet] >> 
test.py::test[flatten_by-flatten_expr_struct-default.txt] [GOOD] >> test.py::test[hor_join-fuse_multi_usage-outlimit] [SKIPPED] >> test.py::test[in-in_tablesource_on_raw_list-] [SKIPPED] >> test.py::test[in-in_with_opt_tuple-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client3-column_type3-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client8-year Int32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client9-year Uint32-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client4-column_type4-False] >> test.py::test[aggregate-aggregation_with_named_node-] [GOOD] >> test.py::test[aggregate-count_distinct_with_filter-] >> test.py::test_order_conflict [GOOD] >> test.py::test_missing_value [GOOD] >> test.py::test_unexpected_value [GOOD] >> test.py::test_local >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client4-column_type4-False] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt] [GOOD] >> test.py::test[aggregate-group_by_hop_only_distinct-] [SKIPPED] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client5-column_type5-True] >> test.py::test[blocks-combine_all_avg-] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client5-column_type5-True] [GOOD] >> test.py::test[blocks-combine_all_some_filter-] >> test_s3_1.py::TestS3::test_huge_source[v2-false-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client6-column_type6-False] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx-] [GOOD] >> test.py::test[column_group-hint_append2-] [SKIPPED] >> test.py::test[column_group-hint_dup_col_fail-] [SKIPPED] >> test.py::test[column_order-insert_reorder_without_columnorder-] [SKIPPED] >> test.py::test[column_order-select_limit_offset_reorder-default.txt] >> test_s3_1.py::TestS3::test_huge_source[v2-true-client0] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt] [GOOD] >> test.py::test[aggregate-group_by_session_compact-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client6-column_type6-False] [GOOD] >> test.py::test[count-count_nullable-] [GOOD] >> test.py::test[distinct-distinct_groupby-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client7-column_type7-False] >> test.py::test[blocks-distinct_opt_state_keys-] [GOOD] >> test.py::test[blocks-interval_sub_interval-] >> test.py::test[aggregate-count_distinct_with_filter-] [GOOD] >> test.py::test[aggregate-group_by_hop_compact-] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only-] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only_start-] [SKIPPED] >> test.py::test[aggregate-group_by_ru_join_simple-] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names] >> 
test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client7-column_type7-False] [GOOD] >> test.py::test[in-in_with_opt_tuple-default.txt] [GOOD] >> test.py::test[insert-keepmeta_view_fail-] [SKIPPED] >> test.py::test[insert-select_after_replace_unwrap-default.txt] [SKIPPED] >> test.py::test[insert-select_relabel-default.txt] [SKIPPED] >> test.py::test[count-count_all-default.txt] [GOOD] >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client8-column_type8-False] >> test.py::test[join-aggr_diff_order-default.txt] >> test.py::test[join-inner_all_right-] [GOOD] >> test.py::test[join-left_all-] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client9-year Uint32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client10-year Int64 NOT NULL-True] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column-] [GOOD] >> test.py::test[aggregate-group_by_rollup_duo_opt-] >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_NONE-client0] [GOOD] >> test.py::test[blocks-pg_to_interval-] [GOOD] >> test.py::test[case-case_multi_val-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client8-column_type8-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client9-column_type9-False] >> ydb_serializable::import_test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client9-column_type9-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client10-column_type10-False] |97.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/import_test >> ydb_serializable::import_test [GOOD] |97.7%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/import_test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client10-year Int64 NOT NULL-True] [GOOD] >> ydb-tests-functional-large_serializable::import_test [GOOD] >> test.py::test[aggregate-group_by_session_compact-] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct_complex-] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client11-year Int64-False] >> test.py::test[aggregate-group_by_rollup_column_ref-] [GOOD] >> test.py::test[aggregate-group_by_ru_with_window_func-] >> KqpFederatedQuery::ExecuteScriptWithDifferentBindingsMode [GOOD] >> KqpFederatedQuery::MultiStatementSelect >> test.py::test[aggregate-group_by_ru_join_simple-] [GOOD] >> test.py::test[aggregate-no_compact_distinct-] [SKIPPED] >> test.py::test[aggregate-percentile_and_avg_grouped-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client10-column_type10-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client11-column_type11-False] >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt] [GOOD] >> test.py::test[flatten_by-flatten_two_fields-] |97.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/large_serializable/import_test >> ydb-tests-functional-large_serializable::import_test [GOOD] |97.8%| 
[TS] {RESULT} ydb/tests/functional/large_serializable/import_test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client11-column_type11-False] [GOOD] >> test.py::test[distinct-distinct_groupby-default.txt] [GOOD] >> test.py::test[distinct-distinct_join-default.txt] >> test.py::test[join-left_all-] [GOOD] >> test.py::test[join-left_join_right_pushdown_null-] >> test.py::test[blocks-combine_all_some_filter-] [GOOD] >> test.py::test[blocks-combine_hashed_count-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client12-column_type12-False] >> test.py::test[column_order-select_limit_offset_reorder-default.txt] [GOOD] >> test.py::test[count-count_distinct_from_view_concat-] [SKIPPED] >> test.py::test[dq-precompute_parallel_mix-] [SKIPPED] >> test.py::test[dq-wrong_script_timeout-default.txt] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_some_fail-] [SKIPPED] >> test.py::test[expr-non_persistable_order_by_fail-] [SKIPPED] >> test.py::test[file-parse_file_in_select_as_uint64-] [SKIPPED] >> test.py::test[flatten_by-flatten_dict-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client12-column_type12-False] [GOOD] >> test.py::test[case-case_multi_val-default.txt] [GOOD] >> test.py::test[column_group-hint_diff_grp_fail2-] [SKIPPED] >> test.py::test[column_group-hint_non_lst_yson_fail-] [SKIPPED] >> test.py::test[column_group-hint_non_map_yson_fail-] [SKIPPED] >> test.py::test[column_group-min_group-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client13-column_type13-False] >> test_s3_1.py::TestS3::test_huge_source[v2-true-client0] [GOOD] >> test_s3_1.py::TestS3::test_huge_source[v1-false-client0] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names] [GOOD] >> KqpFederatedQuery::MultiStatementSelect [GOOD] >> KqpFederatedQuery::InsertIntoBucket >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client13-column_type13-False] [GOOD] >> test.py::test[blocks-interval_sub_interval-] [GOOD] >> test.py::test[blocks-lazy_nonstrict_basic-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client0-column_type0-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client0-column_type0-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client1-column_type1-True] >> test_statistics.py::TestS3::test_sum[v2-client0] [GOOD] >> test.py::test[join-aggr_diff_order-default.txt] [GOOD] >> test.py::test[join-bush_in_in_in-] >> test_statistics.py::TestS3::test_sum[v1-client0] >> test.py::test[aggregate-percentile_and_avg_grouped-] [GOOD] >> test.py::test[aggregate-percentiles_grouped_expr-] >> test.py::test[aggregate-group_by_rollup_duo_opt-] [GOOD] >> test.py::test[aggregate-group_by_session_only-] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client11-year Int64-False] [GOOD] >> ydb-tests-stress-oltp_workload-tests::import_test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client1-column_type1-True] [GOOD] >> 
test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client12-year Uint64-False] >> KqpFederatedQuery::InsertIntoBucket [GOOD] >> KqpFederatedQuery::InsertIntoBucketWithSelect >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client2-column_type2-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client2-column_type2-False] [GOOD] >> test.py::test[aggregate-group_by_ru_with_window_func-] [GOOD] >> test.py::test[aggregate-group_by_session_nopush-] [SKIPPED] >> test.py::test[aggregate-table_funcs_group_by-default.txt] |97.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/tests/import_test >> ydb-tests-stress-oltp_workload-tests::import_test [GOOD] |97.8%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/import_test >> test.py::test[blocks-combine_hashed_count-] [GOOD] >> test.py::test[blocks-not_opt-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client3-column_type3-True] >> test.py::test[distinct-distinct_join-default.txt] [GOOD] >> test.py::test[dq-precompute_parallel-] [SKIPPED] >> test.py::test[dq-precompute_parallel_indep-] >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split >> test.py::test[dq-precompute_parallel_indep-] [SKIPPED] >> test.py::test[dq-precompute_tree-default.txt] [SKIPPED] >> test.py::test[expr-as_table_emptylist-] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_column_fail-] [SKIPPED] >> test.py::test[file-where_key_in_file_content-] [SKIPPED] >> test.py::test[flatten_by-flatten_member_is_struct-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client3-column_type3-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client4-column_type4-True] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test.py::test[flatten_by-flatten_dict-] [GOOD] >> test.py::test[hor_join-skip_sampling-] [SKIPPED] >> test.py::test[in-in_exists_immediate_nested_subq-] >> test.py::test[aggregate-group_compact_sorted_distinct_complex-] [GOOD] >> test_ydb_backup.py::TestIncompleteBackup::test_incomplete_backup_will_not_be_restored >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client4-column_type4-True] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row] >> test.py::test[binding-table_range_strict_binding-default.txt] [SKIPPED] >> test.py::test[blocks-add_int32-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client5-column_type5-False] >> test_validation.py::TestS3::test_nested_type[v2-client0] [GOOD] >> test.py::test[flatten_by-flatten_two_fields-] [GOOD] >> test.py::test[hor_join-yql-6477_table_path-default.txt] [SKIPPED] >> test.py::test[in-huge_in-default.txt] >> KqpFederatedQuery::InsertIntoBucketWithSelect [GOOD] >> KqpFederatedQuery::InsertIntoBucketCaching >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client5-column_type5-False] [GOOD] >> ydb-tests-fq-restarts::import_test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client6-column_type6-True] >> 
test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] [GOOD] >> test.py::test[blocks-lazy_nonstrict_basic-] [GOOD] >> test.py::test[blocks-mod_uint64-] |97.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/import_test >> ydb-tests-fq-restarts::import_test [GOOD] |97.8%| [TS] {RESULT} ydb/tests/fq/restarts/import_test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client12-year Uint64-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client6-column_type6-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client13-year Date-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client7-column_type7-True] >> test.py::test[join-left_join_right_pushdown_null-] [GOOD] >> test.py::test[aggregate-group_by_session_only-] [GOOD] >> test.py::test[aggregate-histogram_cdf-default.txt] >> test.py::test[join-lookupjoin_inner_1o2o-] >> test.py::test[column_group-min_group-default.txt] [GOOD] >> test.py::test[count-count_all_view_concat-] [SKIPPED] >> test.py::test[datetime-date_tz_table_sort_asc-] [SKIPPED] >> KqpFederatedQuery::InsertIntoBucketCaching [GOOD] >> KqpFederatedQuery::InsertIntoBucketValuesCast >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] >> test.py::test[distinct-distinct_count_and_full_count-default.txt] >> test.py::test[aggregate-percentiles_grouped_expr-] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client7-column_type7-True] [GOOD] >> test.py::test[blocks-not_opt-] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client8-column_type8-False] >> test.py::test[blocks-pg_sort-] >> test.py::test[blocks-add_int32-] [GOOD] >> test.py::test[blocks-block_input_mapreduce-] [SKIPPED] >> test.py::test[blocks-block_input_various_types-] [SKIPPED] >> test.py::test[blocks-coalesce_ints-] >> test.py::test[flatten_by-flatten_member_is_struct-] [GOOD] >> test.py::test[hor_join-fuse_multi_outs2-] [SKIPPED] >> test.py::test[hor_join-less_outs-] [SKIPPED] >> test.py::test[hor_join-out_hor_join-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client8-column_type8-False] [GOOD] >> test_workload.py::TestYdbWorkload::test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client9-column_type9-False] >> KqpFederatedQuery::InsertIntoBucketValuesCast [GOOD] >> KqpFederatedQuery::UpdateExternalTable >> test.py::test[join-bush_in_in_in-] [GOOD] >> test.py::test[join-count_bans-off] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client9-column_type9-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client10-column_type10-False] >> test.py::test[join-count_bans-off] [SKIPPED] >> test.py::test[join-filter_joined-off] [SKIPPED] >> test.py::test[join-grace_join1-grace] [SKIPPED] >> test.py::test[join-inner_with_select-off] [SKIPPED] >> test.py::test[join-join_key_cmp_udf-off] [SKIPPED] >> test.py::test[join-join_without_column-] >> 
test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client10-column_type10-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client11-column_type11-False] >> test.py::test[aggregate-table_funcs_group_by-default.txt] [GOOD] >> test.py::test[ansi_idents-basic_columns-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client11-column_type11-False] [GOOD] >> test_s3_1.py::TestS3::test_huge_source[v1-false-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client12-column_type12-False] >> KqpFederatedQuery::UpdateExternalTable [GOOD] >> KqpFederatedQuery::JoinTwoSources >> test_s3_1.py::TestS3::test_huge_source[v1-true-client0] >> test.py::test[join-lookupjoin_inner_1o2o-] [GOOD] >> test.py::test[join-mapjoin_with_empty_read-off] >> test.py::test[blocks-mod_uint64-] [GOOD] >> test.py::test[blocks-pg-] >> ydb-tests-stress-kv-tests::import_test [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client13-year Date-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client0-year Int32 NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client12-column_type12-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client13-column_type13-False] >> test.py::test[blocks-pg_sort-] [GOOD] >> test.py::test[blocks-string_pass-] >> test.py::test[blocks-coalesce_ints-] [GOOD] >> test.py::test[blocks-combine_all_min_filter_opt-] >> test_ydb_backup.py::TestIncompleteBackup::test_incomplete_backup_will_not_be_restored [GOOD] >> test.py::test[in-in_exists_immediate_nested_subq-] [GOOD] |97.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/import_test >> ydb-tests-stress-kv-tests::import_test [GOOD] |97.8%| [TS] {RESULT} ydb/tests/stress/kv/tests/import_test >> test.py::test[in-in_sorted-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client13-column_type13-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client14-column_type14-False] >> TDqPqRdReadActorTests::TestReadFromTopic2 >> KqpFederatedQuery::JoinTwoSources [GOOD] >> KqpFederatedQuery::ExecuteScriptWithExternalTableResolveCheckPartitionedBy >> test.py::test[in-huge_in-default.txt] [GOOD] >> TDqPqRdReadActorTests::TestReadFromTopic2 [GOOD] >> test.py::test[ansi_idents-basic_columns-default.txt] [GOOD] >> test.py::test[bigdate-table_io-default.txt] [SKIPPED] >> test.py::test[binding-drop_binding-] >> test.py::test[in-in_tuple_table-default.txt] [SKIPPED] >> test.py::test[in-in_with_list_dict-default.txt] >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration >> test.py::test[binding-drop_binding-] [SKIPPED] >> test.py::test[blocks-add_uint64-] >> test.py::test[hor_join-out_hor_join-default.txt] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client14-column_type14-False] [GOOD] >> test.py::test[hor_join-yield_on-default.txt] 
[SKIPPED] >> test.py::test[in-basic_in-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client15-column_type15-False] >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_BASIC-client0] >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt] [GOOD] >> test.py::test[blocks-block_input_various_types-v3] [SKIPPED] >> test.py::test[blocks-filter_direct_col-] >> test.py::test[aggregate-histogram_cdf-default.txt] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt] >> test.py::test[distinct-distinct_count_and_full_count-default.txt] [GOOD] >> test.py::test[epochs-read_modified-] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_having_some_fail-] [SKIPPED] >> test.py::test[file-where_key_in_get_file_content-] [SKIPPED] >> test.py::test[flatten_by-flatten_by_opt_dict-] >> test.py::test[blocks-pg-] [GOOD] >> test.py::test[blocks-pg_from_dates-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client15-column_type15-False] [GOOD] >> TDqPqRdReadActorTests::SessionError >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client0-column_type0-False] >> test.py::test[blocks-string_pass-] [GOOD] >> test.py::test[blocks-string_with-] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] >> TDqPqRdReadActorTests::SessionError [GOOD] >> test.py::test[blocks-combine_all_min_filter_opt-] [GOOD] >> test.py::test[blocks-date_less_or_equal-] >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] >> TDqPqRdReadActorTests::ReadWithFreeSpace >> KqpFederatedQuery::ExecuteScriptWithExternalTableResolveCheckPartitionedBy [GOOD] >> KqpFederatedQuery::ExecuteScriptWithEmptyCustomPartitioning >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client0-year Int32 NOT NULL-False] [GOOD] >> test.py::test[in-in_sorted-] [GOOD] >> test.py::test[join-join_without_column-] [GOOD] >> test.py::test[in-in_types_cast_all-default.txt] >> test.py::test[join-mapjoin_early_rewrite-off] [SKIPPED] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client0-column_type0-False] [GOOD] >> TDqPqRdReadActorTests::ReadWithFreeSpace [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client1-year Uint32 NOT NULL-False] >> test.py::test[join-mapjoin_early_rewrite_sequence-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client1-column_type1-True] >> test.py::test[join-mapjoin_with_empty_read-off] [GOOD] >> test.py::test[join-mergejoin_force_per_link-] >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client1-column_type1-True] [GOOD] >> test.py::test[blocks-pg_from_dates-] [GOOD] >> test.py::test[blocks-pg_to_dates-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client2-column_type2-False] >> test.py::test[blocks-string_with-] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt] >> KqpFederatedQuery::ExecuteScriptWithEmptyCustomPartitioning [GOOD] >> 
KqpFederatedQuery::ExecuteScriptWithTruncatedMultiplyResults >> test.py::test[blocks-add_uint64-] [GOOD] >> test.py::test[blocks-combine_all_count_filter-] >> ydb-tests-datashard-async_replication::import_test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client2-column_type2-False] [GOOD] >> test_s3_1.py::TestS3::test_huge_source[v1-true-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client3-column_type3-True] >> test_s3_1.py::TestS3::test_top_level_listing[v2-false-client0] |97.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/import_test >> ydb-tests-datashard-async_replication::import_test [GOOD] |97.8%| [TS] {RESULT} ydb/tests/datashard/async_replication/import_test >> test_db_counters.py::TestKqpCounters::test_case [GOOD] >> test.py::test[in-in_types_cast_all-default.txt] [GOOD] >> test.py::test[insert-append-] >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead [GOOD] >> TDqPqRdReadActorTests::CoordinatorChanged >> test.py::test[join-mergejoin_force_per_link-] [GOOD] >> test.py::test[join-mergejoin_small_primary-] >> test.py::test[insert-append-] [SKIPPED] >> test.py::test[insert-append_view_fail-] [SKIPPED] >> test.py::test[insert_monotonic-from_empty-] [SKIPPED] >> test.py::test[insert_monotonic-non_existing_fail-] [SKIPPED] >> test.py::test[join-do_not_suppres_equijoin_input_sorts-] [SKIPPED] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client3-column_type3-True] [GOOD] >> test.py::test[join-equi_join_two_mult_keys-off] [SKIPPED] >> test.py::test[join-full_trivial_udf_call-] [SKIPPED] >> test.py::test[join-inner_trivial_from_concat-] [SKIPPED] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client4-column_type4-True] >> test.py::test[join-join_key_cmp_udf-] >> test.py::test[in-in_with_list_dict-default.txt] [GOOD] >> test.py::test[insert-two_input_tables-] [SKIPPED] >> test.py::test[insert_monotonic-truncate_fail-] [SKIPPED] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys-] [SKIPPED] >> test.py::test[join-filter_joined-] >> test.py::test[in-basic_in-default.txt] [GOOD] >> test.py::test[insert-double_append_to_anonymous-] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] >> test.py::test[flatten_by-flatten_by_opt_dict-] [GOOD] >> test.py::test[flatten_by-flatten_one_field_another-] >> test.py::test[insert-double_append_to_anonymous-] [SKIPPED] >> test.py::test[insert-drop_sortness-desc] [SKIPPED] >> test.py::test[insert-insert_null-default.txt] [SKIPPED] >> test.py::test[insert-override-from_sorted_calc] [SKIPPED] >> test.py::test[blocks-date_less_or_equal-] [GOOD] >> test.py::test[blocks-date_not_equals_scalar-] >> test.py::test[insert-override-with_view] [SKIPPED] >> test.py::test[insert-override_view_fail-] [SKIPPED] >> test.py::test[join-anyjoin_common_dup-] >> test.py::test[blocks-filter_direct_col-] [GOOD] >> test.py::test[blocks-filter_expr-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client1-year Uint32 NOT NULL-False] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence-] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-] >> 
test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client4-column_type4-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client2-year Uint64 NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client5-column_type5-False] >> KqpFederatedQuery::ExecuteScriptWithTruncatedMultiplyResults [GOOD] >> KqpFederatedQuery::ForbiddenCallablesForYdbTables >> ydb-library-yql-udfs-common-roaring-test::import_test [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-wo_compat] [SKIPPED] >> test.py::test[binding-table_from_binding-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client5-column_type5-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client6-column_type6-True] >> ydb-tests-functional-tpc-large::import_test [GOOD] |97.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/udfs/common/roaring/test/import_test >> ydb-library-yql-udfs-common-roaring-test::import_test [GOOD] |97.8%| [TS] {RESULT} ydb/library/yql/udfs/common/roaring/test/import_test >> test.py::test[case-case_size_eq_cast-default.txt] [GOOD] >> test.py::test[coalesce-coalesce_few_opt-] |97.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/large/import_test >> ydb-tests-functional-tpc-large::import_test [GOOD] |97.8%| [TS] {RESULT} ydb/tests/functional/tpc/large/import_test >> KqpQuerySession::NoLocalAttach [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client6-column_type6-True] [GOOD] >> KqpFederatedQuery::ForbiddenCallablesForYdbTables [GOOD] >> KqpFederatedQuery::ExecuteScriptWithLocationWithoutSlashAtTheEnd >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client7-column_type7-True] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] >> test_s3_1.py::TestS3::test_top_level_listing[v2-false-client0] [GOOD] >> test.py::test[blocks-pg_to_dates-] [GOOD] >> test.py::test[blocks-pg_top_sort-] >> test_s3_1.py::TestS3::test_top_level_listing[v2-true-client0] >> test.py::test[join-join_key_cmp_udf-] [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted-] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] [GOOD] >> TDqPqRdReadActorTests::CoordinatorChanged [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client7-column_type7-True] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] >> TDqPqRdReadActorTests::Backpressure >> test.py::test[blocks-date_not_equals_scalar-] [GOOD] >> test.py::test[blocks-date_sub_scalar-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client8-column_type8-False] >> test.py::test[blocks-filter_expr-] [GOOD] >> test.py::test[blocks-interval_mul_scalar-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client8-column_type8-False] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-] [GOOD] >> 
test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client9-column_type9-False] >> test.py::test[join-mergejoin_small_primary-] [GOOD] >> test.py::test[join-premap_common_left_cross-off] >> test.py::test[join-mapjoin_sharded-default.txt] >> test.py::test[join-filter_joined-] [GOOD] >> test.py::test[join-from_in_front_join-] >> KqpFederatedQuery::ExecuteScriptWithLocationWithoutSlashAtTheEnd [GOOD] >> KqpFederatedQuery::StreamExecuteScriptWithGenericAutoDetection >> test.py::test[join-premap_common_left_cross-off] [SKIPPED] >> test.py::test[join-premap_merge_with_remap-] [SKIPPED] >> test.py::test[blocks-combine_all_count_filter-] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_nested-] >> test.py::test[join-anyjoin_common_dup-] [GOOD] >> test.py::test[join-bush_in_in-off] >> test.py::test[join-split_to_list_as_key-off] [SKIPPED] >> test.py::test[join-yql-14829_leftonly-] [SKIPPED] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt] [SKIPPED] >> test.py::test[like-like_clause-default.txt] >> test.py::test[join-bush_in_in-off] [SKIPPED] >> test.py::test[join-bush_in_in_in-off] [SKIPPED] >> test.py::test[join-equi_join_by_expr-off] [SKIPPED] >> test.py::test[join-join_without_correlation_and_dict_access-] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] >> Backup::UuidValue >> test_db_counters.py::TestStorageCounters::test_storage_counters[disable_separate_quotas] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client2-year Uint64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client9-column_type9-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client3-year Date NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client10-column_type10-False] >> test.py::test[flatten_by-flatten_one_field_another-] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr-] >> KqpFederatedQuery::StreamExecuteScriptWithGenericAutoDetection [GOOD] >> KqpFederatedQuery::ExecuteScriptWithGenericAutoDetection >> test.py::test[blocks-pg_top_sort-] [GOOD] >> test.py::test[blocks-top_sort_one_desc-] >> test.py::test[coalesce-coalesce_few_opt-] [GOOD] >> test.py::test[column_group-hint_diff_grp_fail5-] [SKIPPED] >> test.py::test[binding-table_from_binding-default.txt] [GOOD] >> test.py::test[blocks-add_int16-] >> test_s3_1.py::TestS3::test_top_level_listing[v2-true-client0] [GOOD] >> test.py::test[column_order-insert_tmp-default.txt] [SKIPPED] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt] [SKIPPED] >> test.py::test[column_order-winfunc-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client10-column_type10-False] [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing[v1-false-client0] |97.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/kqp/kqp_query_session/unittest >> KqpQuerySession::NoLocalAttach [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client11-column_type11-False] |97.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_session/unittest >> KqpFederatedQuery::ExecuteScriptWithGenericAutoDetection [GOOD] >> 
KqpFederatedQuery::ExplainScriptWithGenericAutoDetection >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] >> test.py::test[join-join_with_duplicate_keys_on_sorted-] [GOOD] >> test.py::test[join-lookupjoin_inner-] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client11-column_type11-False] [GOOD] >> test.py::test[join-from_in_front_join-] [GOOD] >> test.py::test[join-full_trivial-off] >> test.py::test[blocks-combine_hashed_minmax_nested-] [GOOD] >> test.py::test[blocks-date_equals_scalar-] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client12-column_type12-False] >> test.py::test[join-full_trivial-off] [SKIPPED] >> test.py::test[join-inner_with_order-off] [SKIPPED] >> test.py::test[join-join_and_distinct_key-] >> test_statistics.py::TestS3::test_sum[v1-client0] [GOOD] >> test.py::test[blocks-interval_mul_scalar-] [GOOD] >> test.py::test[blocks-mod_uint64_opt2-] >> test_statistics.py::TestS3::test_aborted_by_user[v2-client0] >> test.py::test[join-join_without_correlation_and_dict_access-] [GOOD] >> test.py::test[join-left_semi_with_other-off] [SKIPPED] >> test.py::test[join-left_trivial-] >> test.py::test[join-mapjoin_sharded-default.txt] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique-off] [SKIPPED] >> KqpFederatedQuery::ExplainScriptWithGenericAutoDetection [GOOD] >> KqpFederatedQuery::ReadFromDataSourceWithoutTable >> test.py::test[blocks-date_sub_scalar-] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nested-off] [SKIPPED] >> test.py::test[blocks-div_uint64_opt2-] >> test.py::test[join-premap_common_inner_filter-off] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client12-column_type12-False] [GOOD] >> test.py::test[like-like_clause-default.txt] [GOOD] >> test.py::test[lineage-list_literal3-default.txt] >> test.py::test[join-premap_common_inner_filter-off] [SKIPPED] >> test.py::test[join-premap_common_right_tablecontent-] [SKIPPED] >> test.py::test[join-star_join_semionly_premap-] >> test.py::test[lineage-list_literal3-default.txt] [SKIPPED] >> test.py::test[lineage-window_session-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client13-column_type13-False] >> test.py::test[join-star_join_semionly_premap-] [SKIPPED] >> test.py::test[join-starjoin_unused_keys-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client3-year Date NOT NULL-False] [GOOD] >> test.py::test[lineage-window_session-default.txt] [SKIPPED] >> test.py::test[optimizers-fuse_map_mapreduce_multi_input-] [SKIPPED] >> test.py::test[optimizers-multi_to_empty_constraint-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client4-year Utf8 NOT NULL-False] >> test.py::test[column_order-winfunc-default.txt] [GOOD] >> test.py::test[count-count_by_nulls-] >> test.py::test[blocks-add_int16-] [GOOD] >> Backup::UuidValue [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] [GOOD] >> 
test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] >> test.py::test[blocks-add_int64-] >> ydb-tests-fq-yt-kqp_yt_import::import_test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client13-column_type13-False] [GOOD] >> test.py::test[blocks-top_sort_one_desc-] [GOOD] >> KqpFederatedQuery::ReadFromDataSourceWithoutTable [GOOD] >> KqpFederatedQuery::InsertIntoDataSourceWithoutTable >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client14-column_type14-False] >> test.py::test[blocks-top_sort_two_mix-] >> test.py::test[blocks-mod_uint64_opt2-] [GOOD] >> test.py::test[blocks-sort_two_desc-] >> test_s3_1.py::TestS3::test_top_level_listing[v1-false-client0] [GOOD] |97.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_import/import_test >> ydb-tests-fq-yt-kqp_yt_import::import_test [GOOD] |97.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/import_test >> test_s3_1.py::TestS3::test_top_level_listing[v1-true-client0] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] >> test.py::test[flatten_by-flatten_with_group_by_expr-] [GOOD] >> test.py::test[hor_join-row_num_per_sect-] >> test.py::test[blocks-date_equals_scalar-] [GOOD] >> test.py::test[blocks-date_sub_interval-] >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_BASIC-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client14-column_type14-False] [GOOD] >> KqpFederatedQuery::InsertIntoDataSourceWithoutTable [GOOD] >> KqpFederatedQuery::SpecifyExternalTableInsteadOfExternalDataSource >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client15-column_type15-False] >> test.py::test[join-lookupjoin_inner-] [GOOD] >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution >> test.py::test[join-mapjoin_partial_uniq_keys-] >> test.py::test[count-count_by_nulls-] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client15-column_type15-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v1-false-client0] >> test.py::test[count-count_const_no_grouping-default.txt] >> test.py::test[join-join_and_distinct_key-] [GOOD] >> test.py::test[join-left_join_right_pushdown_no_opt-] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] >> KqpFederatedQuery::SpecifyExternalTableInsteadOfExternalDataSource [GOOD] >> KqpFederatedQuery::QueryWithNoDataInS3 >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] >> test.py::test[optimizers-multi_to_empty_constraint-] [GOOD] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt] >> test.py::test[blocks-div_uint64_opt2-] [GOOD] >> test.py::test[join-left_trivial-] [GOOD] >> test.py::test[join-lookupjoin_with_cache-off] [SKIPPED] >> test.py::test[join-mergejoin_force_align1-off] [SKIPPED] >> test.py::test[join-mergejoin_left_null_column-] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt] [SKIPPED] >> test.py::test[optimizers-yql-2582_limit_for_join_input-] >> 
test.py::test[join-starjoin_unused_keys-] [GOOD] >> test.py::test[blocks-pg_tofrom-] >> test.py::test[blocks-sort_two_desc-] [GOOD] >> test.py::test[blocks-sub_uint64_opt2-] >> test.py::test[json-json_query/example-] >> test_ydb_backup.py::TestAlterBackupRestore::test_alter_table_with_data_backup_restore >> test.py::test[blocks-date_sub_interval-] [GOOD] >> test.py::test[blocks-lazy_nonstrict_nested-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client4-year Utf8 NOT NULL-False] [GOOD] >> test_statistics.py::TestS3::test_aborted_by_user[v2-client0] [GOOD] >> KqpFederatedQuery::QueryWithNoDataInS3 [GOOD] >> KqpFederatedQuery::ExecuteScriptWithLargeStrings >> test_statistics.py::TestS3::test_aborted_by_user[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client5-year Int64 NOT NULL-False] >> test_s3_1.py::TestS3::test_top_level_listing[v1-true-client0] [GOOD] >> test.py::test[count-count_const_no_grouping-default.txt] [GOOD] >> test.py::test[datetime-date_tz_table_sort_desc-] [SKIPPED] >> test.py::test[dq-blacklisted_pragmas-] [SKIPPED] >> test.py::test[dq-mem_limit-] [SKIPPED] >> test.py::test[expr-empty_iterator-] >> test.py::test[blocks-add_int64-] [GOOD] >> test.py::test[blocks-block_input-] [SKIPPED] >> test.py::test[blocks-coalesce_bools-] >> test.py::test[join-left_join_right_pushdown_no_opt-] [GOOD] >> test.py::test[join-left_null_literal-off] [SKIPPED] >> test.py::test[join-left_only_with_other-off] [SKIPPED] >> test.py::test[blocks-top_sort_two_mix-] [GOOD] >> test.py::test[column_order-insert-] [SKIPPED] >> test.py::test[column_order-insert_with_reorder_cols-] >> test.py::test[join-mapjoin_partial_uniq_keys-] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq-off] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o2o-] >> test.py::test[column_order-insert_with_reorder_cols-] [SKIPPED] >> test.py::test[count-count_no_grouping-default.txt] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v1-false-client0] [GOOD] >> test.py::test[join-mergejoin_force_align3-] [SKIPPED] >> test.py::test[join-mergejoin_with_table_range-] [SKIPPED] >> test.py::test[join-mergejoin_with_table_range-off] [SKIPPED] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v1-true-client0] >> test.py::test[join-opt_on_opt_side-off] [SKIPPED] >> test.py::test[join-premap_map_cross-] [SKIPPED] >> test.py::test[join-premap_merge_extrasort1-off] >> test.py::test[join-mergejoin_left_null_column-] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order-] >> test.py::test[join-premap_merge_extrasort1-off] [SKIPPED] >> test.py::test[join-pullup_inner-off] [SKIPPED] >> test.py::test[join-pushdown_filter_over_left-] >> test.py::test[blocks-sub_uint64_opt2-] [GOOD] >> test.py::test[blocks-top_sort_two_asc-] >> test.py::test[optimizers-yql-2582_limit_for_join_input-] [GOOD] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt] [SKIPPED] >> test.py::test[order_by-SortByTwoFields-] >> 
test_transform.py::TestYamlConfigTransformations::test_simplified[dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/backup/unittest >> Backup::UuidValue [GOOD] Test command err: Found S3 object: "ProducerUuidValueBackup/data_00.csv" Found S3 object: "ProducerUuidValueBackup/metadata.json" Found S3 object: "ProducerUuidValueBackup/scheme.pb" |97.9%| [TM] {RESULT} ydb/tests/functional/backup/unittest >> test.py::test[hor_join-row_num_per_sect-] [GOOD] >> test.py::test[in-in_compact_distinct-empty] >> test.py::test[blocks-lazy_nonstrict_nested-] [GOOD] >> test.py::test[blocks-minmax_strings_filter-] >> test.py::test_local [GOOD] >> test.py::test[expr-empty_iterator-] [GOOD] >> test.py::test[expr-evaluate_parse_inf_nan-] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/py3test >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. |97.9%| [TM] {RESULT} ydb/tests/tools/kqprun/tests/py3test >> test.py::test[join-pushdown_filter_over_left-] [GOOD] >> test.py::test[join-star_join-] >> test.py::test[json-json_query/example-] [GOOD] >> test.py::test[key_filter-mixed_opt_bounds-] [SKIPPED] >> test.py::test[key_filter-multiusage-] [SKIPPED] >> test.py::test[key_filter-part_key_over_dynamic-] >> test.py::test[blocks-pg_tofrom-] [GOOD] >> test.py::test[blocks-sort_one_desc-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client5-year Int64 NOT NULL-False] [GOOD] >> test.py::test[blocks-top_sort_two_asc-] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order-] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order-off] [SKIPPED] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client6-year Int32-False] >> test.py::test[column_group-hint_append-] [SKIPPED] >> test.py::test[column_group-length-single] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] [GOOD] >> test.py::test[join-pullup_extra_columns-] [SKIPPED] >> test.py::test[join-simple_columns_partial-] >> test.py::test[count-count_no_grouping-default.txt] [GOOD] >> test.py::test[csee-yql-7237-] [SKIPPED] >> test_s3_0.py::TestS3::test_checkpoints_on_join_s3_with_yds[v1-mvp_external_ydb_endpoint0-client0] [GOOD] >> test_statistics.py::TestS3::test_aborted_by_user[v1-client0] [GOOD] >> test.py::test[join-lookupjoin_semi_1o2o-] [GOOD] >> test.py::test[distinct-distinct_list_after_group-default.txt] >> test_s3_0.py::TestS3::test_double_optional_types_validation[v2-client0] >> test.py::test[join-mapjoin_opt_vs_2xopt-] [SKIPPED] >> test.py::test[join-mergejoin_big_primary_unique-] >> test_ydb_backup.py::TestAlterBackupRestore::test_alter_table_with_data_backup_restore [GOOD] >> test.py::test[blocks-coalesce_bools-] [GOOD] >> test.py::test[blocks-combine_all_decimal_max-default.txt] [SKIPPED] >> test.py::test[blocks-date_greater-] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v1-true-client0] [GOOD] >> 
test_explicit_partitioning_1.py::TestS3::test_raw_format[v2-false-client0] >> test.py::test[order_by-SortByTwoFields-] [GOOD] >> test.py::test[order_by-assume_over_input_desc-] [SKIPPED] >> test.py::test[order_by-native_desc_assume_with_transform-] [SKIPPED] >> test.py::test[params-complex_yson-] [SKIPPED] >> test.py::test[pg-select_columnref2-default.txt] >> test.py::test[pg-select_columnref2-default.txt] [SKIPPED] >> test.py::test[pg-select_qstarref1-default.txt] [SKIPPED] >> test.py::test[pg-select_table1-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q17-default.txt] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000790/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_ydb_over_fq/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000790/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_ydb_over_fq/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1389434) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1391618 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[pg-tpcds-q17-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q43-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q86-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q89-default.txt] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] [GOOD] >> test.py::test[pg-tpcds-q89-default.txt] [SKIPPED] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] >> test.py::test[pg-tpch-q06-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q20-default.txt] [SKIPPED] >> test.py::test[produce-process_streaming-default.txt] >> test.py::test[key_filter-part_key_over_dynamic-] [GOOD] >> test.py::test[lambda-lambda_udf-] >> test.py::test[blocks-minmax_strings_filter-] [GOOD] >> test.py::test[blocks-nested_optionals-] >> test_workload.py::TestYdbWorkload::test[row] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/py3test >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] [GOOD] 2025-05-05 03:24:31,788 ERROR devtools.ya.test.canon.compare: Cannot calculate diff: Traceback (most recent call last): File "devtools/ya/test/canon/compare.py", line 402, in _get_file_diff_via_diff raise Exception( Exception: 
'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1 stdout: stderr: |97.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serializable/py3test >> test.py::test_local [GOOD] >> test.py::test[in-in_compact_distinct-empty] [GOOD] >> test.py::test[join-simple_columns_partial-] [GOOD] >> test.py::test[join-star_join-] [GOOD] >> test.py::test[in-in_sorted_by_tuple-] >> test.py::test[join-three_equalities_paren-off] [SKIPPED] >> test.py::test[join-three_equalities-] >> test.py::test[key_filter-dict_contains-default.txt] |97.9%| [TM] {RESULT} ydb/tests/functional/serializable/py3test |98.0%| [TM] {RESULT} ydb/library/yaml_config/ut_transform/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client6-year Int32-False] [GOOD] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client7-year Uint32-False] >> test.py::test[expr-evaluate_parse_inf_nan-] [GOOD] >> test.py::test[flatten_by-flatten_with_resource-] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_all-] [SKIPPED] >> test.py::test[in-in_noansi_join-] >> test.py::test[distinct-distinct_list_after_group-default.txt] [GOOD] >> test.py::test[dq-wrong_script_segf-] [SKIPPED] >> test.py::test[flatten_by-flatten_mode-default.txt] [SKIPPED] >> test.py::test[hor_join-group_ranges-] >> test.py::test[blocks-date_greater-] [GOOD] >> test.py::test[hor_join-group_ranges-] [SKIPPED] >> test.py::test[in-in_types_cast-default.txt] >> test.py::test[blocks-date_not_equals-] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v2-false-client0] [GOOD] >> test.py::test[blocks-sort_one_desc-] [GOOD] >> test.py::test[case-case_val_then_else-default.txt] >> test.py::test[column_group-length-single] [GOOD] >> test.py::test[column_group-many_inserts-] [SKIPPED] >> test.py::test[column_group-respull-] [SKIPPED] >> test.py::test[join-mergejoin_big_primary_unique-] [GOOD] >> test.py::test[join-mergejoin_semi_composite_to_inner-] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v2-true-client0] >> test.py::test[count-count-] >> test.py::test[blocks-nested_optionals-] [GOOD] >> test.py::test[coalesce-coalesce-] >> test.py::test[action-insert_after_eval-] [SKIPPED] >> test.py::test[action-parallel_for-default.txt] [SKIPPED] >> test.py::test[action-process_from_subquery_with_orderby-default.txt] [SKIPPED] >> test.py::test[action-subquery_opt_args-default.txt] [SKIPPED] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt] [SKIPPED] >> test.py::test[aggregate-compare_by_nulls-default.txt] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS] >> test_example.py::TestExample::test_example >> ydb-tests-functional-compatibility::import_test [GOOD] |98.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/compatibility/import_test >> ydb-tests-functional-compatibility::import_test [GOOD] |98.0%| [TS] {RESULT} ydb/tests/functional/compatibility/import_test >> test.py::test[join-three_equalities-] [GOOD] >> test.py::test[join-three_equalities_paren-] >> test.py::test[in-in_sorted_by_tuple-] [GOOD] >> test.py::test[in-in_tablesource_to_equijoin-] >> 
test.py::test[produce-process_streaming-default.txt] [GOOD] >> test.py::test[ql_filter-integer_members_eval-] [SKIPPED] >> test.py::test[sampling-direct_read-dynamic] [SKIPPED] >> test.py::test[blocks-date_not_equals-] [GOOD] >> test.py::test[blocks-decimal_avg-] [SKIPPED] >> test.py::test[blocks-decimal_multiplicative_ops-] [SKIPPED] >> test.py::test[blocks-decimal_op_decimal_scalar-] [SKIPPED] >> test.py::test[blocks-distinct_mixed_keys-] >> test.py::test[lambda-lambda_udf-] [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt] >> test.py::test[sampling-join_left_sample-default.txt] [SKIPPED] >> test.py::test[sampling-table_content-] [SKIPPED] >> test.py::test[sampling-take_with_sampling-default.txt] [SKIPPED] >> test.py::test[schema-user_schema_existing_column-] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v2-true-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v1-false-client0] >> test.py::test[count-count-] [GOOD] >> test.py::test[expr-non_persistable_inner_select_fail-] [SKIPPED] >> test.py::test[flatten_by-flatten_dict_by_opt-] >> test.py::test[in-in_types_cast-default.txt] [GOOD] >> test.py::test[in-in_with_tuple-default.txt] >> test.py::test[in-in_noansi_join-] [GOOD] >> test.py::test[insert-append-with_view] [SKIPPED] >> test.py::test[insert-append_after_replace-default.txt] [SKIPPED] >> test.py::test[insert-append_sorted-to_sorted_desc] [SKIPPED] >> test.py::test[insert-insert_relabeled-default.txt] [SKIPPED] >> test.py::test[key_filter-dict_contains-default.txt] [GOOD] >> test.py::test[key_filter-range_union_lower_excluded-default.txt] >> test.py::test[insert-replace_ordered_by_key_desc-default.txt] [SKIPPED] >> test.py::test[insert-use_anon_table_without_fill_fail-] [SKIPPED] >> test.py::test[join-bush_dis_in_in_in-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client7-year Uint32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client8-year Int64-False] >> test.py::test[join-mergejoin_semi_composite_to_inner-] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner-] >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_FULL-client0] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS] [GOOD] >> test.py::test[aggregate-compare_by_nulls-default.txt] [GOOD] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS] >> test.py::test[case-case_val_then_else-default.txt] [GOOD] >> test.py::test[case-case_val_when_then-default.txt] >> test_example.py::TestExample::test_example [GOOD] >> test_example.py::TestExample::test_example2 [GOOD] >> test_example.py::TestExample::test_linked_with_testcase [GOOD] >> ydb-library-benchmarks-runner::import_test [GOOD] >> test.py::test[coalesce-coalesce-] [GOOD] >> test.py::test[coalesce-coalesce_sugar-default.txt] >> test_example.py::TestExample::test_skipped_with_issue [SKIPPED] |98.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/import_test >> ydb-library-benchmarks-runner::import_test [GOOD] >> test.py::test[key_filter-range_union_lower_excluded-default.txt] [GOOD] >> test.py::test[lambda-lambda_with_tie-default.txt] |98.0%| [TS] {RESULT} ydb/library/benchmarks/runner/import_test >> 
test.py::test[join-bush_dis_in_in_in-] [GOOD] >> test.py::test[join-convert_key-off] [SKIPPED] >> test.py::test[join-flatten_columns1-off] [SKIPPED] >> test.py::test[join-left_cast_to_string-] >> test.py::test[limit-empty_sort_after_limit-default.txt] [GOOD] >> test.py::test[limit-many_top_sorts-default.txt] >> test.py::test[schema-user_schema_existing_column-] [GOOD] >> test.py::test[select-swap_columns-default.txt] >> test.py::test[join-mergejoin_semi_to_inner-] [GOOD] >> test.py::test[join-mergejoin_small_primary-off] [SKIPPED] >> test.py::test[join-premap_map_inner-] [SKIPPED] >> test.py::test[join-pullup_context_dep-] >> ydb-tests-fq-plans::import_test [GOOD] >> test.py::test[action-action_eval_cluster_table-] [SKIPPED] >> test.py::test[action-action_nested_query-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client8-year Int64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client9-year Uint64-False] >> test.py::test[join-three_equalities_paren-] [GOOD] >> test.py::test[join-trivial_view-off] [SKIPPED] >> test.py::test[join-yql-19081-] [SKIPPED] >> test.py::test[join-yql-8980-off] [SKIPPED] >> test.py::test[key_filter-calc_dependent-default.txt] |98.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/import_test >> ydb-tests-fq-plans::import_test [GOOD] |98.0%| [TS] {RESULT} ydb/tests/fq/plans/import_test >> test.py::test[flatten_by-flatten_dict_by_opt-] [GOOD] >> test.py::test[flatten_by-flatten_expr_join-] >> test.py::test[in-in_with_tuple-default.txt] [GOOD] >> test.py::test[insert-append_sorted-] [SKIPPED] >> test.py::test[insert-keepmeta-] [SKIPPED] >> test.py::test[insert-udf_empty-] [SKIPPED] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt] [GOOD] >> test.py::test[aggregate-group_by_gs_few_empty-] >> test.py::test[blocks-distinct_mixed_keys-] [GOOD] >> test.py::test[blocks-finalize_hashed_keys-] >> test.py::test[join-bush_dis_in-off] [SKIPPED] >> test.py::test[join-cbo_7tables-] [SKIPPED] >> test.py::test[join-equi_join_three_asterisk-] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS] >> test.py::test[lambda-lambda_with_tie-default.txt] [GOOD] >> test.py::test[lineage-select_group_by_key-default.txt] [SKIPPED] >> test.py::test[multicluster-extend-default.txt] [SKIPPED] >> test.py::test[optimizers-sorted_scalar_content-] [SKIPPED] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt] >> test.py::test[in-in_tablesource_to_equijoin-] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v1-false-client0] [GOOD] >> test.py::test[insert-anonymous_tables-default.txt] [SKIPPED] >> test.py::test[insert-insert_from_other-] [SKIPPED] >> test.py::test[insert-override-from_sorted_desc] [SKIPPED] >> test.py::test[insert-select_with_sort_limit-default.txt] [SKIPPED] >> test.py::test[insert-trivial_select-default.txt] [SKIPPED] >> test.py::test[insert-use_anon_table_before_commit_fail-] [SKIPPED] >> test.py::test[insert_monotonic-overlaping_fail-] [SKIPPED] >> test.py::test[join-from_in_front_join-off] [SKIPPED] >> test.py::test[join-inner_all_right-off] [SKIPPED] >> test.py::test[join-join_cbo_3_tables-] [SKIPPED] >> test.py::test[coalesce-coalesce_sugar-default.txt] [GOOD] 
>> test.py::test[column_group-hint_anon-perusage] [SKIPPED] >> test.py::test[column_group-hint_non_yson_fail-] [SKIPPED] >> test.py::test[column_order-ordered_plus_native-] [SKIPPED] >> test.py::test[distinct-distinct_and_join-] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v1-true-client0] >> test.py::test[join-join_without_correlation_and_struct_access-] >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] >> test.py::test[limit-many_top_sorts-default.txt] [GOOD] >> test.py::test[lineage-topsort-default.txt] [SKIPPED] >> test.py::test[join-pullup_context_dep-] [GOOD] >> test.py::test[join-star_join_inners-] >> test.py::test[case-case_val_when_then-default.txt] [GOOD] >> test.py::test[case-case_when_then-default.txt] >> test.py::test[lineage-unused_columns-default.txt] [SKIPPED] >> test.py::test[optimizers-yql-6133_skip_deps-] [SKIPPED] >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers-] [SKIPPED] >> test.py::test[order_by-order_by_mul_columns-default.txt] >> test.py::test[select-swap_columns-default.txt] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt] [SKIPPED] >> test.py::test[select-tablepathprefix-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client9-year Uint64-False] [GOOD] >> test.py::test[join-left_cast_to_string-] [GOOD] >> test.py::test[join-left_only_semi_and_other-off] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client10-year String NOT NULL-True] >> test.py::test[key_filter-calc_dependent-default.txt] [GOOD] >> test.py::test[key_filter-contains-default.txt] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS] [GOOD] >> test.py::test[flatten_by-flatten_expr_join-] [GOOD] >> test.py::test[hor_join-fuse_multi_usage-] [SKIPPED] >> test.py::test[hor_join-max_in_tables-] >> test.py::test[action-action_nested_query-default.txt] [GOOD] >> test.py::test[action-eval_atom_wrong_type_param-] [SKIPPED] >> test.py::test[action-eval_like-] [SKIPPED] >> test.py::test[distinct-distinct_and_join-] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS] >> test.py::test[join-equi_join_three_asterisk-] [GOOD] >> test.py::test[join-flatten_columns1-] >> test.py::test[action-eval_on_modif_table_fail-] [SKIPPED] >> test.py::test[action-eval_unresolved_type_arg-default.txt] >> test.py::test[hor_join-yield_off-] [SKIPPED] >> test.py::test[insert-trivial_literals_multirow-default.txt] [SKIPPED] >> test.py::test[join-cbo_4tables-] [SKIPPED] >> test.py::test[join-emptyjoin_unused_keys-] [SKIPPED] >> test.py::test[join-grace_join1-map] [SKIPPED] >> test.py::test[join-inner_grouped_by_expr-] >> test.py::test[blocks-finalize_hashed_keys-] [GOOD] >> test.py::test[blocks-group_by_complex_key-] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt] [GOOD] >> test.py::test[optimizers-yql-5978_fill_multi_usage-] [SKIPPED] >> test.py::test[optimizers-yt_shuffle_by_keys-] >> test.py::test[optimizers-yt_shuffle_by_keys-] [SKIPPED] >> test.py::test[order_by-native_desc_sort-over_sorted] [SKIPPED] >> test.py::test[order_by-order_by_dot_column-default.txt] >> test.py::test[join-star_join_inners-] [GOOD] >> test.py::test[key_filter-decimal-] [SKIPPED] >> test.py::test[key_filter-is_null-] >> 
test.py::test[aggregate-group_by_gs_few_empty-] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt] >> test.py::test[join-lookupjoin_semi_1o-] [GOOD] >> test.py::test[join-mapjoin_with_empty_read-] >> test.py::test[join-join_without_correlation_and_struct_access-] [GOOD] >> test.py::test[join-left_join_null_column-] >> test.py::test[join-inner_grouped_by_expr-] [GOOD] >> test.py::test[join-inner_grouped_by_expr-off] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client10-year String NOT NULL-True] [GOOD] >> test.py::test[join-inner_grouped_by_expr-off] [SKIPPED] >> test.py::test[join-inner_trivial_from_concat-off] [SKIPPED] >> test.py::test[join-inner_with_order-] >> test.py::test[select-tablepathprefix-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client11-year String-False] >> test.py::test[hor_join-max_in_tables-] [GOOD] >> test.py::test[in-in_ansi_join-] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v1-true-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v2-false-client0] >> test.py::test[order_by-order_by_mul_columns-default.txt] [GOOD] >> test.py::test[order_by-order_by_udf_duo-] >> test.py::test[case-case_when_then-default.txt] [GOOD] >> test.py::test[column_group-hint_anon_groups-perusage] [SKIPPED] >> test.py::test[column_group-hint_empty_grp_fail-] [SKIPPED] >> test.py::test[column_group-length-perusage] >> test.py::test[action-eval_unresolved_type_arg-default.txt] [GOOD] >> test.py::test[action-nested_subquery-] [SKIPPED] >> test.py::test[action-select_from_subquery_with_orderby-default.txt] [SKIPPED] >> test.py::test[action-subquery-default.txt] >> ydb-tests-functional-wardens::import_test [GOOD] >> test.py::test[action-subquery-default.txt] [SKIPPED] >> test.py::test[aggr_factory-bitxor-default.txt] [SKIPPED] >> test.py::test[aggr_factory-sum_if-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_distinct_list-default.txt] [SKIPPED] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt] >> test.py::test[key_filter-contains-default.txt] [GOOD] >> test.py::test[key_filter-is_null_multi_key-] >> test.py::test[key_filter-is_null-] [GOOD] >> test.py::test[limit-insert_with_limit-dynamic] [SKIPPED] >> test.py::test[limit-limit-dynamic] [SKIPPED] >> test.py::test[limit-limit_offset-default.txt] |98.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/import_test >> ydb-tests-functional-wardens::import_test [GOOD] |98.0%| [TS] {RESULT} ydb/tests/functional/wardens/import_test >> test_ydb_backup.py::TestPermissionsBackupRestoreSingleTable::test_single_table >> test.py::test[join-left_join_null_column-] [GOOD] >> test.py::test[join-flatten_columns1-] [GOOD] >> test.py::test[join-full_equal_not_null-] >> test.py::test[join-left_join_right_pushdown_optional-] [SKIPPED] >> test.py::test[join-left_trivial-off] [SKIPPED] >> test.py::test[join-lookupjoin_bug8533-off] [SKIPPED] >> test.py::test[join-mergejoin_any_no_join_reduce-off] [SKIPPED] >> test.py::test[join-mergejoin_force_one_sorted-] >> 
test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params2] [GOOD] >> test.py::test[blocks-group_by_complex_key-] [GOOD] >> test.py::test[blocks-string_len_and_cmp-] >> test.py::test[join-inner_with_order-] [GOOD] >> test.py::test[join-join_comp_map_table-off] [SKIPPED] >> test.py::test[join-join_no_correlation_in_order_by-] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt] [GOOD] >> test.py::test[table_range-tablepath_with_non_existing-] [SKIPPED] >> test.py::test[tpch-q15-default.txt] >> test.py::test[aggregate-group_by_gs_subselect-default.txt] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_session_aliases-] >> CoordinatorTests::Route [GOOD] >> CoordinatorTests::RouteTwoTopicWichSameName [GOOD] >> LeaderElectionTests::Test1 >> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_statistics.py::TestS3::test_aborted_by_user[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00076d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_statistics/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00076d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_statistics/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1397738) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00076d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_statistics/testing_out_stuff/test_statistics.py.TestS3.test_egress.v2-client0-json_list/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00076d/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_statistics/testing_out_stuff/test_statistics.py.TestS3.test_egress.v2-client0-json_list/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1401292 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[order_by-order_by_dot_column-default.txt] [GOOD] >> test.py::test[order_by-order_by_dynum_desc-default.txt] >> test.py::test[order_by-order_by_udf_duo-] [GOOD] >> test.py::test[pg-select_where-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q11-default.txt] >> LeaderElectionTests::Test1 [GOOD] >> LeaderElectionTests::TestLocalMode [GOOD] >> TopicSessionTests::TwoSessionsWithoutOffsets >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS] [GOOD] >> test.py::test[pg-tpcds-q11-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q16-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q28-default.txt] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] >> test.py::test[pg-tpcds-q28-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q37-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q38-default.txt] |98.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/example/py3test >> test_example.py::TestExample::test_skipped_with_issue [SKIPPED] |98.0%| [TM] {RESULT} ydb/tests/example/py3test >> test.py::test[column_group-length-perusage] [GOOD] >> test.py::test[column_order-align_publish-] [SKIPPED] >> test.py::test[column_order-select_sample-default.txt] >> test.py::test[pg-tpcds-q38-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q52-default.txt] >> test.py::test[pg-tpcds-q52-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q10-default.txt] [SKIPPED] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client11-year String-False] [GOOD] >> test.py::test[key_filter-is_null_multi_key-] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread-] [SKIPPED] >> test.py::test[produce-fuse_reduces_diff_sets-] [SKIPPED] >> test.py::test[produce-native_desc_reduce_with_presort-] [SKIPPED] >> test.py::test[key_filter-no_bypass_merge-] [SKIPPED] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt] >> 
test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client12-year Utf8-False] >> test.py::test[produce-process_multi_out_bad_count_fail-] [SKIPPED] >> test.py::test[produce-process_streaming_inline_bash-default.txt] >> test.py::test[join-mapjoin_with_empty_read-] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct-] >> test.py::test[join-mergejoin_force_one_sorted-] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_unmatched-] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_inner-] >> test.py::test[join-full_equal_not_null-] [GOOD] >> test.py::test[join-join_comp_common_table-] >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_FULL-client0] [GOOD] >> test.py::test[tpch-q15-default.txt] [GOOD] >> test.py::test[tpch-q9-default.txt] >> test.py::test[join-join_no_correlation_in_order_by-] [GOOD] >> test.py::test[join-left_only_with_other-] >> test.py::test[blocks-string_len_and_cmp-] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt] >> test.py::test[limit-limit_offset-default.txt] [GOOD] >> test.py::test[limit-yql-8611_calc_peephole-] [SKIPPED] >> test.py::test[lineage-window_tablerow-default.txt] [SKIPPED] >> test.py::test[multicluster-local_tc_with_force-default.txt] [SKIPPED] >> test.py::test[multicluster-pull-default.txt] [SKIPPED] >> test.py::test[optimizers-flatmap_with_non_struct_out-] [SKIPPED] >> test.py::test[optimizers-group_visit_lambdas-] [SKIPPED] >> test.py::test[optimizers-sort_by_nonstrict_const-] [SKIPPED] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v2-false-client0] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct-] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry-off] [SKIPPED] >> test.py::test[join-mergejoin_force_per_link-off] [SKIPPED] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v2-true-client0] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt] [GOOD] >> test.py::test[order_by-order_by_dynum_desc-default.txt] [GOOD] >> test.py::test[order_by-order_by_value_desc-default.txt] >> test.py::test[join-mergejoin_saves_output_sort_nested-] >> test.py::test[in-in_ansi_join-] [GOOD] >> test.py::test[in-in_immediate_subquery-default.txt] >> test_tenants.py::TestTenants::test_when_deactivate_fat_tenant_creation_another_tenant_is_ok[enable_alter_database_create_hive_first--false] [GOOD] >> test.py::test[key_filter-range_union-] >> test_ydb_backup.py::TestPermissionsBackupRestoreSingleTable::test_single_table [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt] [GOOD] >> test.py::test[aggregate-avg_interval-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_session_aliases-] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] [GOOD] >> test.py::test[aggregate-group_by_hop_distinct-] [SKIPPED] >> test.py::test[aggregate-group_by_with_where-default.txt] >> test.py::test[binding-table_filter_strict_binding-default.txt] [SKIPPED] >> test.py::test[blocks-combine_all_minmax_double-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client12-year Utf8-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] >> 
test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client13-year Date-False] >> test.py::test[produce-process_streaming_inline_bash-default.txt] [GOOD] >> test.py::test[produce-reduce_all_multi_in-default.txt] [SKIPPED] >> test.py::test[produce-reduce_with_python-] [SKIPPED] >> test.py::test[ql_filter-integer_many_right-] [SKIPPED] >> test.py::test[result_types-pg-default.txt] [SKIPPED] >> test.py::test[sampling-bind_expr-default.txt] [SKIPPED] >> test.py::test[sampling-sample-default.txt] >> test.py::test[join-join_comp_common_table-] [GOOD] >> test.py::test[join-lookupjoin_semi_2o-off] [SKIPPED] >> test.py::test[join-lookupjoin_with_cache-] [SKIPPED] >> test.py::test[join-mapjoin_on_tablerecord-off] >> test.py::test[join-mapjoin_on_tablerecord-off] [SKIPPED] >> test.py::test[join-mapjoin_with_anonymous-] [SKIPPED] >> test.py::test[join-mergejoin_big_primary-] >> test.py::test[column_order-select_sample-default.txt] [GOOD] >> test.py::test[dq-precompute_asyncfile-] [SKIPPED] >> test.py::test[expr-inline_call-] >> test.py::test[join-mergejoin_sorts_output_for_sort_inner-] [GOOD] >> test.py::test[join-premap_merge_extrasort1-] [SKIPPED] >> test.py::test[join-premap_merge_extrasort2-off] [SKIPPED] >> test.py::test[join-premap_no_premap-] >> test.py::test[join-premap_no_premap-] [SKIPPED] >> test.py::test[join-premap_nonseq_flatmap-] [SKIPPED] >> test.py::test[join-pullup_exclusion-] >> test.py::test[count-count_nullable_sub-default.txt] [GOOD] >> test.py::test[distinct-distinct_columns-default.txt] >> test.py::test[order_by-order_by_value_desc-default.txt] [GOOD] >> test.py::test[pg-aggregate_combine_all-] [SKIPPED] >> test.py::test[pg-select_qstarref2-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q04-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q35-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q40-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q57-default.txt] >> test.py::test[in-in_immediate_subquery-default.txt] [GOOD] >> test.py::test[insert-append_proto_fail-] [SKIPPED] >> test.py::test[insert-from_two_sorted_by_calc-default.txt] [SKIPPED] >> test.py::test[insert-override-proto] [SKIPPED] >> test.py::test[insert_monotonic-not_all_fail-] [SKIPPED] >> test.py::test[join-anyjoin_merge_nodup-] >> test.py::test[tpch-q9-default.txt] [GOOD] >> test.py::test[type_v3-json-] [SKIPPED] >> test.py::test[type_v3-split-] [SKIPPED] >> test.py::test[udf-udaf_short-] [SKIPPED] >> test.py::test[union_all-mix_map_and_project-trivial_map] >> test.py::test[pg-tpcds-q57-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q58-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q66-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q82-default.txt] [SKIPPED] >> test.py::test[produce-fuse_reduces_with_presort-] [SKIPPED] >> test.py::test[produce-process_multi_in-] [SKIPPED] >> test.py::test[ql_filter-integer_single-] [SKIPPED] >> test.py::test[result_types-containers-default.txt] [SKIPPED] >> test.py::test[sampling-bind_join_right-default.txt] [SKIPPED] >> test.py::test[sampling-map-keyfilter] [SKIPPED] >> test.py::test[schema-fake_column-default.txt] [SKIPPED] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] [GOOD] >> test.py::test[select-braces-default.txt] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_s3_1.py::TestS3::test_top_level_listing[v1-true-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: 
unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00077f/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00077f/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1391314) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00077f/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_write_result.v1-kikimr_params0-client0/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1394296 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00077f/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_top_level_listing_2.v2-kikimr_params0-false-client0/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00077f/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_top_level_listing_2.v2-kikimr_params0-false-client0/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper 
name='/home/runner/.ya/build/build_root/177e/00077f/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_precompute.v2-false-client0/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00077f/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_precompute.v2-false-client0/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] >> test.py::test[join-mergejoin_big_primary-] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client13-year Date-False] [GOOD] >> test.py::test[join-left_only_with_other-] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client0-year Int32 NOT NULL-False] >> test.py::test[aggregate-group_by_with_where-default.txt] [GOOD] >> test.py::test[aggregate-percentile_and_variance-] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt] [GOOD] >> test.py::test[optimizers-test_no_aggregate_split-] >> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] [GOOD] >> ServerRestartTest::RestartOnGetSession >> test.py::test[join-pullup_exclusion-] [GOOD] >> test.py::test[join-pullup_renaming-off] [SKIPPED] >> test.py::test[join-simple_columns_partial-off] [SKIPPED] >> test.py::test[json-jsondocument/select-] >> test.py::test[join-mergejoin_saves_output_sort_nested-] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename-] >> test.py::test[expr-inline_call-] [GOOD] >> test.py::test[file-parse_file_in_select_as_int-] [SKIPPED] >> test.py::test[flatten_by-flatten_columns-default.txt] >> test.py::test[blocks-combine_all_minmax_double-] [GOOD] >> test.py::test[blocks-combine_all_sum_filter_opt-] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] Test command err: contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead. contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead. 
yielded = self.gen.throw(*exc_info) >> test.py::test[distinct-distinct_columns-default.txt] [GOOD] >> test.py::test[dq-read_cost-default.txt] [SKIPPED] >> test.py::test[expr-langver-] [SKIPPED] >> test.py::test[expr-len-] >> test.py::test[sampling-sample-default.txt] [GOOD] >> test.py::test[sampling-subquery_sort-default.txt] [SKIPPED] >> test.py::test[sampling-system_sampling-] >> test.py::test[union_all-mix_map_and_project-trivial_map] [GOOD] >> test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] >> test.py::test[sampling-system_sampling-] [SKIPPED] >> test.py::test[schema-other_job-] [SKIPPED] >> test.py::test[schema-select_all-row_spec_diff_sort] >> test.py::test[view-all_from_view-] >> test.py::test[key_filter-range_union-] [GOOD] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt] [SKIPPED] >> test.py::test[order_by-sort_with_take-] [SKIPPED] >> test.py::test[pg-insert-] [SKIPPED] >> test.py::test[pg-select_from_columns_qstar-default.txt] [SKIPPED] >> test.py::test[pg-select_subquery2_qstar-default.txt] [SKIPPED] >> test.py::test[pg-select_unionall_self-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q08-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v2-true-client0] [GOOD] >> test.py::test[pg-tpcds-q08-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q31-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q54-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q62-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q09-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_valid_projected_column_values[v1-true-client0] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] [GOOD] >> test.py::test[pg-tpch-q09-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q11-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q19-default.txt] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] >> test.py::test[pg-tpch-q19-default.txt] [SKIPPED] >> test.py::test[produce-process_multi_in_single_out-] [SKIPPED] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt] [SKIPPED] >> test.py::test[produce-reduce_all_expr-default.txt] [SKIPPED] >> test.py::test[produce-reduce_by_struct-default.txt] [SKIPPED] >> test.py::test[produce-reduce_multi_in-] [SKIPPED] >> test.py::test[produce-reduce_typeinfo-] [SKIPPED] >> test.py::test[ql_filter-integer_single_disable_prune-] [SKIPPED] >> test.py::test[sampling-bind_multiple_sample-default.txt] >> test.py::test[aggregate-percentile_and_variance-] [GOOD] >> test.py::test[binding-table_concat_strict_binding-default.txt] [SKIPPED] >> test.py::test[binding-table_range_binding-default.txt] [SKIPPED] >> test.py::test[sampling-bind_multiple_sample-default.txt] [SKIPPED] >> test.py::test[sampling-read-dynamic] [SKIPPED] >> test.py::test[sampling-reduce_with_presort-] [SKIPPED] >> test.py::test[schema-limit_simple-] [SKIPPED] >> test.py::test[schema-select_all-row_spec_diff_sort_desc] >> test.py::test[json-jsondocument/select-] [GOOD] >> test.py::test[select-braces-default.txt] [GOOD] >> test.py::test[select-exists_with_table-default.txt] [SKIPPED] >> test.py::test[blocks-bitcast_block-] >> test.py::test[join-mergejoin_with_different_key_names_norename-] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client0-year Int32 NOT NULL-False] [GOOD] >> test.py::test[key_filter-is_null_with_condition-] 
>> test.py::test[select-substring_v1-default.txt] >> test.py::test[join-order_of_qualified-off] [SKIPPED] >> test.py::test[join-premap_common_multiparents_no_premap-] [SKIPPED] >> test.py::test[join-pullup_left_semi-off] [SKIPPED] >> test.py::test[join-pullup_null_column-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client1-year Uint32 NOT NULL-False] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many-] [GOOD] >> test.py::test[join-mapjoin_unused_keys-] >> test.py::test[blocks-combine_all_sum_filter_opt-] [GOOD] >> test.py::test[blocks-minmax_strings-] >> test.py::test[expr-len-] [GOOD] >> test.py::test[flatten_by-flatten_few_fields-] >> test.py::test[join-anyjoin_merge_nodup-] [GOOD] >> test.py::test[join-bush_dis_in_in-] >> test.py::test[schema-select_all-row_spec_diff_sort] [GOOD] >> test.py::test[select-exists_false-default.txt] >> test.py::test[action-dep_world_action_quote-default.txt] >> test.py::test[optimizers-test_no_aggregate_split-] [GOOD] >> test.py::test[optimizers-unused_columns_group-] >> test.py::test[flatten_by-flatten_columns-default.txt] [GOOD] >> test.py::test[in-in_enum_single1-default.txt] >> test.py::test[join-mergejoin_choose_primary_with_retry-] [GOOD] >> test.py::test[join-mergejoin_force_align3-off] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_norename-off] >> test.py::test[join-mergejoin_with_different_key_names_norename-off] [SKIPPED] >> test.py::test[join-no_empty_join_for_dyn-] >> test_explicit_partitioning_1.py::TestS3::test_valid_projected_column_values[v1-true-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_valid_projected_column_values[v2-true-client0] >> test.py::test[select-substring_v1-default.txt] [GOOD] >> test.py::test[select-trivial_order_by-default.txt] >> test.py::test[schema-select_all-row_spec_diff_sort_desc] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp-] >> test.py::test[blocks-bitcast_block-] [GOOD] >> test.py::test[blocks-block_input_sys_columns-] [SKIPPED] >> test.py::test[blocks-combine_hashed_minmax_double-] >> test.py::test[key_filter-is_null_with_condition-] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt] >> test.py::test[join-pullup_null_column-] [GOOD] >> test.py::test[join-pullup_rownumber-] >> test.py::test[view-all_from_view-] [GOOD] >> test.py::test[view-trivial_view-] >> TopicSessionTests::TwoSessionsWithoutOffsets [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] [GOOD] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] >> TopicSessionTests::TwoSessionWithoutPredicate >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] >> test.py::test[flatten_by-flatten_few_fields-] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client1-year Uint32 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client2-year Uint64 NOT NULL-False] >> test.py::test[join-mapjoin_unused_keys-] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct-off] [SKIPPED] >> test.py::test[join-mergejoin_force_align1-] >> test.py::test[select-exists_false-default.txt] [GOOD] >> 
test.py::test[select-one_unlabeled_column-default.txt] >> test.py::test[join-mergejoin_force_align1-] [SKIPPED] >> test.py::test[join-mergejoin_force_align2-off] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_cross-off] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_left-] >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] >> test.py::test[optimizers-unused_columns_group-] [GOOD] >> test.py::test[order_by-changed_sort_with_limit-] [SKIPPED] >> test.py::test[order_by-order_with_null-default.txt] >> test.py::test[join-bush_dis_in_in-] [GOOD] >> test.py::test[join-bush_dis_in_in_in-off] [SKIPPED] >> test.py::test[join-count_bans-] >> test.py::test[blocks-minmax_strings-] [GOOD] >> test.py::test[blocks-partial_blocks1-] >> KqpFederatedQuery::ExecuteScriptWithLargeStrings [GOOD] >> KqpFederatedQuery::ExecuteScriptWithLargeFile >> test.py::test[select-trivial_order_by-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt] >> test.py::test[action-dep_world_action_quote-default.txt] [GOOD] >> test.py::test[action-subquery_merge1-default.txt] [SKIPPED] >> test.py::test[aggr_factory-bitor-default.txt] [SKIPPED] >> test.py::test[aggr_factory-bottom_by-default.txt] [SKIPPED] >> test.py::test[aggr_factory-log_histogram-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt] >> test_explicit_partitioning_1.py::TestS3::test_valid_projected_column_values[v2-true-client0] [GOOD] >> test.py::test[view-trivial_view-] [GOOD] >> test.py::test[view-view_with_lambda-] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] [GOOD] >> test.py::test[in-in_enum_single1-default.txt] [GOOD] >> test.py::test[join-no_empty_join_for_dyn-] [GOOD] >> test.py::test[in-in_scalar_vector_subquery-default.txt] [SKIPPED] >> test.py::test[join-pullup_extend-] [SKIPPED] >> test.py::test[insert-part_sortness-] [SKIPPED] >> test.py::test[join-pullup_left-off] [SKIPPED] >> test.py::test[insert-part_sortness-desc] [SKIPPED] >> test.py::test[join-pushdown_filter_over_left-off] [SKIPPED] >> test.py::test[insert-replace_inferred-] [SKIPPED] >> test.py::test[join-right_trivial-] >> test.py::test[insert-yql-14538-] [SKIPPED] >> test.py::test[insert_monotonic-truncate_and_append-default.txt] [SKIPPED] >> test.py::test[join-bush_dis_in-] |98.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_when_deactivate_fat_tenant_creation_another_tenant_is_ok[enable_alter_database_create_hive_first--false] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] >> test.py::test[order_by-order_with_null-default.txt] [GOOD] >> test.py::test[pg-name-] [SKIPPED] >> test.py::test[pg-select_common_type_unionall-] [SKIPPED] >> test.py::test[pg-tpcds-q01-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q27-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q30-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q45-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q55-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q64-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q98-default.txt] [SKIPPED] >> test.py::test[produce-process_with_udf_rows-default.txt] [SKIPPED] >> test.py::test[produce-process_with_udf_validate-default.txt] [SKIPPED] >> 
test.py::test[produce-reduce_lambda-] >> test.py::test[schema-select_all_inferschema_op_custom_tmp-] [GOOD] >> test.py::test[schema-select_fields_inferschema-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client2-year Uint64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client3-year Date NOT NULL-False] >> test.py::test[join-pullup_rownumber-] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off] [SKIPPED] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf-] >> ydb-tests-postgres_integrations-library-ut::import_test [GOOD] >> test.py::test[flatten_by-flatten_with_group_by-] [GOOD] >> test.py::test[hor_join-table_record-] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt] [GOOD] >> test.py::test[key_filter-split_input_with_key_filter2-] [SKIPPED] >> test.py::test[limit-limit_over_sort_desc_in_subquery-] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt] |98.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/import_test >> ydb-tests-postgres_integrations-library-ut::import_test [GOOD] |98.1%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/import_test >> test.py::test[join-mergejoin_sorts_output_for_sort_left-] [GOOD] >> test.py::test[join-mergejoin_sorts_output_for_sort_right-] >> test.py::test[blocks-combine_hashed_minmax_double-] [GOOD] >> test.py::test[blocks-date_equals-] >> test.py::test[select-one_unlabeled_column-default.txt] [GOOD] >> test.py::test[select-struct_access_without_table_name-] >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_PROFILE-client0] >> test.py::test[join-count_bans-] [GOOD] >> test.py::test[join-equi_join_two_mult_keys-] >> test.py::test[blocks-partial_blocks1-] [GOOD] >> test.py::test[blocks-pg_call-] [SKIPPED] >> test.py::test[blocks-pg_to_strings-] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt] [GOOD] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt] >> test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] [GOOD] >> TopicSessionTests::TwoSessionWithoutPredicate [GOOD] >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate |98.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] >> test.py::test[produce-reduce_lambda-] [GOOD] >> test.py::test[produce-reduce_multi_out-] >> test.py::test[produce-reduce_multi_out-] [SKIPPED] >> test.py::test[sampling-map-dynamic] [SKIPPED] >> test.py::test[sampling-subquery_expr-default.txt] [SKIPPED] >> test.py::test[schema-row_spec_with_default_values-] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering [FAIL] >> test_s3_0.py::TestS3::test_double_optional_types_validation[v2-client0] [GOOD] |98.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] >> test.py::test[join-bush_dis_in-] [GOOD] >> test.py::test[join-convert_key-] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] [GOOD] >> 
test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt] [GOOD] >> test.py::test[table_range-limit_with_table_path_over_sorted_range-] [SKIPPED] >> test.py::test[table_range-table_funcs_expr-] [SKIPPED] >> test.py::test[tpch-q20-default.txt] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] >> test.py::test[view-view_with_lambda-] [GOOD] >> test.py::test[weak_field-weak_field_join-] >> test.py::test[blocks-date_equals-] [GOOD] >> test.py::test[blocks-date_greater_or_equal_scalar-] >> test.py::test[hor_join-table_record-] [GOOD] >> test.py::test[in-in_with_table_of_tuples-default.txt] [SKIPPED] >> test.py::test[in-yql-10038-default.txt] [SKIPPED] >> test.py::test[in-yql-14677-default.txt] >> test.py::test[blocks-pg_to_strings-] [GOOD] >> test.py::test[blocks-string_as_agg_key-] >> test.py::test[limit-limit_over_sort_desc_in_subquery-] [GOOD] >> test.py::test[lineage-reduce_all_row-default.txt] [SKIPPED] >> test.py::test[lineage-select_mix_fields-default.txt] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_right-] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names-] >> test.py::test[join-right_trivial-] [GOOD] >> test.py::test[join-yql-10654_pullup_with_sys_columns-off] [SKIPPED] >> test.py::test[join-yql-14829_left-off] [SKIPPED] >> test.py::test[join-yql-4275-off] [SKIPPED] >> test.py::test[json-json_value/example-] >> test.py::test[lineage-window_many-default.txt] [SKIPPED] >> test.py::test[multicluster-map_force-] [SKIPPED] >> test.py::test[multicluster-sort_force-] [SKIPPED] >> test.py::test[optimizers-test_fuse_map_take-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client3-year Date NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client4-year Utf8 NOT NULL-False] >> test.py::test[schema-select_fields_inferschema-] [GOOD] >> test.py::test[schema-user_schema_override-] [SKIPPED] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_validation.py::TestS3::test_nested_type[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/0007b2/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_validation/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/0007b2/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_validation/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1387040) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1389276 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf-] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter-] >> test.py::test[join-equi_join_two_mult_keys-] [GOOD] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt] [GOOD] >> test.py::test[join-inner_grouped-] >> test.py::test[aggregate-aggregation_by_udf-] >> test.py::test[select-struct_access_without_table_name-] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_disable-default.txt] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt] >> test_ydb_backup.py::TestPermissionsBackupRestoreFolderWithTable::test_folder_with_table >> test.py::test[weak_field-weak_field_join-] [GOOD] >> test.py::test[window-distinct_over_window_full_frames-] [SKIPPED] >> test.py::test[window-row_number_to_map_multiple-default.txt] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] [GOOD] >> test.py::test[in-yql-14677-default.txt] [GOOD] >> test.py::test[insert-drop_sortness-] [SKIPPED] >> test.py::test[insert-override-with_read_udf] [SKIPPED] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params2] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000742/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000742/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1414414) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1416647 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[insert-unique_distinct_hints-] [SKIPPED] >> test.py::test[join-compact_join-] [SKIPPED] >> test.py::test[join-equi_join_by_expr-] >> test.py::test[join-convert_key-] [GOOD] >> test.py::test[join-full_equal_null-off] [SKIPPED] >> test.py::test[join-full_join-] >> test.py::test[optimizers-test_fuse_map_take-default.txt] [GOOD] >> test.py::test[order_by-literal_single_item_sort-] [SKIPPED] >> test.py::test[pg-join_using_multiple2-] [SKIPPED] >> test.py::test[pg-select_yql_type-] [SKIPPED] >> test.py::test[pg-tpcds-q56-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q68-default.txt] >> test.py::test[pg-tpcds-q68-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q77-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q84-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q04-default.txt] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread_fail-] [SKIPPED] >> test.py::test[produce-process_with_python_as_struct-default.txt] [SKIPPED] >> test.py::test[produce-reduce_lambda_presort_twin_list-] >> test.py::test[schema-row_spec_with_default_values-] [GOOD] >> test.py::test[schema-select_all-yamred_dsv] >> test_log_scenario.py::TestLogScenario::test[180] >> test.py::test[schema-select_all-yamred_dsv] [SKIPPED] >> test.py::test[select-autoextract_source_value-default.txt] >> test.py::test[join-mergejoin_with_different_key_names-] [GOOD] >> test.py::test[join-no_empty_join_for_dyn-off] [SKIPPED] >> test.py::test[join-nopushdown_filter_over_inner-off] [SKIPPED] >> test.py::test[join-opt_on_opt_side_with_group-off] [SKIPPED] >> test.py::test[join-premap_context_dep-] [SKIPPED] >> test.py::test[join-pullup_random-] >> test.py::test[blocks-string_as_agg_key-] [GOOD] >> test.py::test[blocks-tuple_nth-] [SKIPPED] >> test.py::test[column_group-hint-single] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail3-] >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client4-year Utf8 NOT NULL-False] [GOOD] >> 
test.py::test[column_group-hint_diff_grp_fail3-] [SKIPPED] >> test.py::test[column_group-hint_dup_def_fail-] [SKIPPED] >> test.py::test[column_group-publish-perusage] [SKIPPED] >> test.py::test[column_order-insert_with_new_cols-] [SKIPPED] >> test.py::test[column_order-union_all-default.txt] >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client5-year Int64 NOT NULL-False] >> test.py::test[tpch-q20-default.txt] [GOOD] >> test.py::test[type_v3-append_diff_layout2-] [SKIPPED] >> test.py::test[type_v3-ignore_v3_hint-opt] >> test.py::test[window-row_number_to_map_multiple-default.txt] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt] [SKIPPED] >> test.py::test[window-win_func_aggr_4func-] >> test.py::test[join-inner_grouped-] [GOOD] >> test.py::test[join-join_comp_map_table-] >> ydb-library-benchmarks-report-ut::import_test [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter-] [GOOD] >> test.py::test[join-star_join_mirror-off] [SKIPPED] >> test.py::test[join-star_join_semionly-off] [SKIPPED] >> test.py::test[join-two_aggrs-default.txt] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows |98.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/report/ut/import_test >> ydb-library-benchmarks-report-ut::import_test [GOOD] |98.1%| [TS] {RESULT} ydb/library/benchmarks/report/ut/import_test >> test.py::test[json-json_value/example-] [GOOD] >> test.py::test[key_filter-between_with_key_filter-] >> test.py::test[blocks-date_greater_or_equal_scalar-] [GOOD] >> test.py::test[blocks-distinct_pure_keys-] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt] [GOOD] >> test.py::test[select-match_clause-] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS] >> test.py::test[select-autoextract_source_value-default.txt] [GOOD] >> test.py::test[select-qualified_all_and_group_by-default.txt] >> test.py::test[aggregate-aggregation_by_udf-] [GOOD] >> test.py::test[aggregate-avg_and_sum-default.txt] >> test.py::test[join-pullup_random-] [GOOD] >> test.py::test[join-selfjoin_on_sorted-] >> ydb-tests-functional-sqs-common::import_test [GOOD] >> ServerRestartTest::RestartOnGetSession [GOOD] |98.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/import_test >> ydb-tests-functional-sqs-common::import_test [GOOD] |98.1%| [TS] {RESULT} ydb/tests/functional/sqs/common/import_test >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt] >> test.py::test[type_v3-ignore_v3_hint-opt] [GOOD] >> test.py::test[type_v3-uuid-] [SKIPPED] >> test.py::test[union_all-inner_union_all_with_limits-default.txt] [SKIPPED] >> test.py::test[union_all-mix_map_and_project-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client5-year Int64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client6-year Int32-False] >> ydb-tests-datashard-s3::import_test [GOOD] >> test.py::test[key_filter-between_with_key_filter-] [GOOD] >> test.py::test[key_filter-contains_tuples-default.txt] 
>> test_ydb_backup.py::TestPermissionsBackupRestoreFolderWithTable::test_folder_with_table [GOOD] >> test.py::test[column_order-union_all-default.txt] [GOOD] >> test.py::test[dq-join_cbo_native_3_tables-] [SKIPPED] >> test.py::test[dq-wrong_script-] [SKIPPED] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt] >> test.py::test[join-equi_join_by_expr-] [GOOD] >> test.py::test[join-full_equal_null-] >> test.py::test[join-full_join-] [GOOD] >> test.py::test[join-two_aggrs-default.txt] [GOOD] >> test.py::test[join-yql-14847-] >> test.py::test[join-full_trivial_udf_call-off] [SKIPPED] >> test.py::test[join-join_with_duplicate_keys_on_sorted-off] [SKIPPED] >> test.py::test[join-left_join_null_column-off] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_nested_left-] >> test.py::test[produce-reduce_lambda_presort_twin_list-] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling-sorted] [SKIPPED] >> test.py::test[sampling-direct_read-] [SKIPPED] >> test.py::test[sampling-mapjoin_right_sample-default.txt] [SKIPPED] >> test.py::test[schema-concat-] [SKIPPED] >> test.py::test[schema-diffrerent_schemas-] |98.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/import_test >> ydb-tests-datashard-s3::import_test [GOOD] |98.2%| [TS] {RESULT} ydb/tests/datashard/s3/import_test >> test.py::test[join-left_join_right_pushdown_nested_left-] [SKIPPED] >> test.py::test[join-lookupjoin_semi-] >> test.py::test[window-win_func_aggr_4func-] [GOOD] >> test.py::test[window-win_func_auto_arg-default.txt] >> test.py::test[blocks-distinct_pure_keys-] [GOOD] >> test.py::test[blocks-filter_partial_expr-] >> test.py::test[join-join_comp_map_table-] [GOOD] >> test.py::test[join-lookupjoin_inner-off] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o-off] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off] [SKIPPED] >> test.py::test[join-mapjoin_on_tablerecord-] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS] [GOOD] >> test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS] >> test.py::test[join-selfjoin_on_sorted-] [GOOD] >> test.py::test[join-star_join_inners_premap-off] [SKIPPED] |98.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> ServerRestartTest::RestartOnGetSession [GOOD] |98.2%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> test.py::test[union_all-mix_map_and_project-] [GOOD] >> test.py::test[weak_field-weak_field_long_name-] >> test.py::test[join-star_join_multi-off] [SKIPPED] >> test.py::test[join-strict_keys-] [SKIPPED] >> test.py::test[join-three_equalities-off] [SKIPPED] >> test.py::test[join-yql-4275-] >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt] [GOOD] >> test.py::test[tpch-q22-default.txt] >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected [GOOD] >> TopicSessionTests::TwoSessionsWithOffsets >> test.py::test[select-match_clause-] [GOOD] >> test.py::test[select-refselect-] >> test.py::test[select-refselect-] [SKIPPED] >> test.py::test[select-result_rows_limit-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client6-year Int32-False] [GOOD] >> test.py::test[join-yql-14847-] [GOOD] >> 
test.py::test[key_filter-dependent_value-default.txt] >> test.py::test[aggregate-avg_and_sum-default.txt] [GOOD] >> test.py::test[aggregate-avg_and_sum_by_value-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client7-year Uint32-False] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt] [GOOD] >> test.py::test[flatten_by-flatten_list-] >> test.py::test[key_filter-contains_tuples-default.txt] [GOOD] >> test.py::test[limit-sort_calc_limit-] [SKIPPED] >> test.py::test[lineage-list_literal1-default.txt] >> test.py::test[lineage-list_literal1-default.txt] [SKIPPED] >> test.py::test[lineage-scalar_context-] [SKIPPED] >> test.py::test[optimizers-keep_sort_with_renames-] [SKIPPED] >> test.py::test[select-qualified_all_and_group_by-default.txt] [GOOD] >> test.py::test[select-sample_limit_recordindex-] [SKIPPED] >> test.py::test[optimizers-length_over_merge-] >> test.py::test[join-full_equal_null-] [GOOD] >> test.py::test[join-full_trivial-] >> test.py::test[schema-diffrerent_schemas-] [GOOD] >> test.py::test[schema-insert-read_schema] [SKIPPED] >> test.py::test[schema-read_schema_other-] >> test.py::test[blocks-filter_partial_expr-] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt] [SKIPPED] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt] >> test.py::test[blocks-if-] >> test.py::test[action-eval_pragma-] >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_PROFILE-client0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS] [GOOD] >> test.py::test[window-win_func_auto_arg-default.txt] [GOOD] >> test.py::test[window-win_func_in_lib-] [SKIPPED] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS] >> test.py::test[window-win_func_over_group_by_list_names-] >> test.py::test[join-mapjoin_on_tablerecord-] [GOOD] >> test.py::test[join-mapjoin_on_very_complex_type-] >> test.py::test[tpch-q22-default.txt] [GOOD] >> test.py::test[tpch-q7-default.txt] >> test.py::test[select-result_rows_limit-] [GOOD] >> test.py::test[weak_field-weak_field_long_name-] [GOOD] >> test.py::test[window-generic/aggregations_before_current-] >> test.py::test[aggregate-avg_and_sum_by_value-] [GOOD] >> test.py::test[select-reuse_named_node-default.txt] >> test.py::test[join-lookupjoin_semi-] [GOOD] >> test.py::test[join-lookupjoin_semi_1o-off] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o2o-off] [SKIPPED] >> test.py::test[join-lookupjoin_semi_2o-] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join-] >> test.py::test[join-yql-4275-] [GOOD] >> test.py::test[key_filter-is_null_or_data-] >> ydb-tests-example::import_test [GOOD] >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt] [GOOD] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt] [SKIPPED] >> test.py::test[weak_field-hor_join_with_mix_weak_access-] >> test.py::test[key_filter-dependent_value-default.txt] [GOOD] >> test.py::test[key_filter-uuid-] [SKIPPED] >> test.py::test[library-package-] [SKIPPED] >> test.py::test[limit-insert_with_limit-] [SKIPPED] >> test.py::test[lineage-flatten_by-] [SKIPPED] >> test.py::test[lineage-grouping_sets-] >> test.py::test[lineage-grouping_sets-] [SKIPPED] >> test.py::test[lineage-select_join-default.txt] [SKIPPED] >> 
test.py::test[multicluster-insert_fill-] [SKIPPED] >> test.py::test[optimizers-reduce_with_aux_sort_column-] [SKIPPED] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt] [SKIPPED] >> test.py::test[optimizers-yql-17715_concat_sort_desc-] [SKIPPED] >> test.py::test[order_by-assume_with_filter-] [SKIPPED] >> test.py::test[order_by-literal-] [SKIPPED] >> test.py::test[order_by-literal_take_zero_sort-] [SKIPPED] >> test.py::test[order_by-native_desc_sort_calc-] |98.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/example/import_test >> ydb-tests-example::import_test [GOOD] |98.2%| [TS] {RESULT} ydb/tests/example/import_test >> test.py::test[action-eval_pragma-] [GOOD] >> test.py::test[action-eval_range-] >> test.py::test[order_by-native_desc_sort_calc-] [SKIPPED] >> test.py::test[order_by-order_by_dynum-default.txt] >> test.py::test[action-eval_range-] [SKIPPED] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt] [SKIPPED] >> test.py::test[aggr_factory-hll-default.txt] [SKIPPED] >> test.py::test[aggr_factory-max_by-default.txt] [SKIPPED] >> test.py::test[aggr_factory-top_by-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt] >> solomon_recipe_grpc::import_test [GOOD] >> test.py::test[blocks-if-] [GOOD] >> test.py::test[blocks-interval_sub_interval_scalar-] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS] [GOOD] >> test.py::test[schema-read_schema_other-] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort] >> test.py::test[flatten_by-flatten_list-] [GOOD] >> test.py::test[flatten_by-struct_without_correlation-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client7-year Uint32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client8-year Int64-False] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS] >> test.py::test[join-mapjoin_on_very_complex_type-] [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys-off] [SKIPPED] >> test.py::test[join-mergejoin_force_align2-] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort-] >> test.py::test[window-win_func_over_group_by_list_names-] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix-] >> test.py::test[select-reuse_named_node-default.txt] [GOOD] >> test.py::test[select-sampleselect-1000] >> test.py::test[key_filter-is_null_or_data-] [GOOD] >> test.py::test[key_filter-tzdate-] [SKIPPED] >> test.py::test[key_filter-utf8_with_legacy-] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_s3_0.py::TestS3::test_double_optional_types_validation[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000724/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000724/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/moto_server.err.log' mode='w' 
encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1419021) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000724/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_csv.v2-false-client0/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1421367 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000724/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_csv.v2-false-client0/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 03:24:03] send response localhost:24740/?database=local ::1 - - [05/May/2025 03:24:03] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |98.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tools/solomon_emulator_grpc/import_test >> solomon_recipe_grpc::import_test [GOOD] |98.2%| [TS] {RESULT} ydb/library/yql/tools/solomon_emulator_grpc/import_test >> test.py::test[aggregate-group_by_column_alias_reuse_for_join-] [GOOD] >> test.py::test[aggregate-group_by_cube_duo-] >> TopicSessionTests::TwoSessionsWithOffsets [GOOD] >> 
TopicSessionTests::BadDataSessionError >> test.py::test[join-full_trivial-] [GOOD] >> test.py::test[join-inner_grouped-off] [SKIPPED] >> test.py::test[join-lookupjoin_semi_2o-] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence-off] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-off] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort-off] [SKIPPED] >> test.py::test[join-mergejoin_semi_composite_to_inner-off] >> test.py::test[join-join_no_correlation_in_order_by-off] [SKIPPED] >> test.py::test[join-join_table_conflict_fail-] [SKIPPED] >> test.py::test[optimizers-length_over_merge-] [GOOD] >> test.py::test[join-join_without_correlation_names-] >> test.py::test[optimizers-sorted_sql_in-] [SKIPPED] >> test.py::test[join-mergejoin_semi_composite_to_inner-off] [SKIPPED] >> test.py::test[join-mergejoin_semi_to_inner-off] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted-] >> test.py::test[optimizers-yql-14279_keyextract_with_world_dep-] [SKIPPED] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs-] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_explicit_partitioning_1.py::TestS3::test_valid_projected_column_values[v2-true-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/0007a6/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_explicit_partitioning_1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/0007a6/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_explicit_partitioning_1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1387599) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1390248 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[order_by-order_by_dynum-default.txt] [GOOD] >> test.py::test[window-generic/aggregations_before_current-] [GOOD] >> test.py::test[window-presort_window_partition_by_table-default.txt] >> test.py::test[tpch-q7-default.txt] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-] >> test.py::test[order_by-order_by_num_key_and_subkey_desc-] >> test.py::test[weak_field-hor_join_with_mix_weak_access-] [GOOD] >> test.py::test[window-current/session_incompat_sort-] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts >> test.py::test[flatten_by-struct_without_correlation-default.txt] [GOOD] >> test.py::test[hor_join-merge_multiouts_part-] [SKIPPED] >> test.py::test[hor_join-sorted_out-] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS] [GOOD] >> test.py::test[join-mergejoin_narrows_output_sort-] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names-off] [SKIPPED] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names] >> test.py::test[schema-select_all-row_spec_hide_sort] [GOOD] >> test.py::test[schema-select_all_inferschema-extra_field] >> test.py::test[join-nested_semi_join-off] [SKIPPED] >> test.py::test[join-nopushdown_filter_over_inner-] >> test.py::test[blocks-interval_sub_interval_scalar-] [GOOD] >> test.py::test[column_group-hint_anon_groups-disable] [SKIPPED] >> test.py::test[column_group-hint_append_fail-] [SKIPPED] >> test.py::test[column_order-select_groupby_with_star-default.txt] >> test.py::test[select-sampleselect-1000] [GOOD] >> test.py::test[select-select_all_from_concat-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt] >> test.py::test[join-join_without_correlation_names-] [GOOD] >> test.py::test[join-left_semi_with_other-] >> test.py::test[aggregate-group_by_cube_duo-] [GOOD] >> test.py::test[aggregate-group_by_expr_and_having-] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt] >> test.py::test[key_filter-utf8_with_legacy-] [GOOD] >> test.py::test[key_filter-yql-8117-table_key_filter-] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix-] [GOOD] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt] >> test.py::test[key_filter-yql-8117-table_key_filter-] [SKIPPED] >> test.py::test[key_filter-yql-8663-dedup_ranges-] >> test.py::test[type_v3-ignore_v3_hint-] [GOOD] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs-] [GOOD] >> test.py::test[order_by-literal_empty_list_sort-] [SKIPPED] >> test.py::test[order_by-order_by_tablerow_column-] >> test.py::test[window-presort_window_partition_by_table-default.txt] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all-] >> test.py::test[type_v3-mergejoin_with_sort-] >> 
test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client8-year Int64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client9-year Uint64-False] >> test.py::test[window-current/session_incompat_sort-] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt] [SKIPPED] >> test.py::test[hor_join-sorted_out-] [GOOD] >> test.py::test[window-full/aggregations-] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted-] [GOOD] >> test.py::test[join-nopushdown_filter_with_depends_on-] [SKIPPED] >> test.py::test[join-opt_on_opt_side-] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names] [GOOD] >> test.py::test[in-in_compact_distinct-] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names] >> test.py::test[join-left_semi_with_other-] [GOOD] >> test.py::test[join-lookupjoin_bug7646_csee-off] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o-] >> test.py::test[aggregate-aggregate_by_one_column-default.txt] [GOOD] >> test.py::test[aggregate-aggregate_udf_nested-] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt] [GOOD] >> test.py::test[view-file_eval-] [SKIPPED] >> test.py::test[view-file_inner_library-] [SKIPPED] >> test.py::test[view-init_view_after_eval-default.txt] [SKIPPED] >> test.py::test[order_by-order_by_num_key_and_subkey_desc-] [GOOD] >> test.py::test[pg-join_using_tables2-default.txt] [SKIPPED] >> test.py::test[pg-select_starref1-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q90-default.txt] >> test.py::test[view-secure_eval-] [SKIPPED] >> test.py::test[window-full/leadlag-] >> test.py::test[aggregate-group_by_expr_and_having-] [GOOD] >> test.py::test[aggregate-group_by_full_path-default.txt] >> TopicSessionTests::BadDataSessionError [GOOD] >> test.py::test[pg-tpcds-q90-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q08-default.txt] >> TopicSessionTests::WrongFieldType >> test.py::test[pg-tpch-q08-default.txt] [SKIPPED] >> test.py::test[produce-discard_process_with_lambda-default.txt] [SKIPPED] >> test.py::test[produce-discard_reduce_lambda-] >> test.py::test[join-nopushdown_filter_over_inner-] [GOOD] >> test.py::test[join-premap_nonseq_flatmap-off] >> test.py::test[produce-discard_reduce_lambda-] [SKIPPED] >> test.py::test[produce-process_sorted_multi_out-] [SKIPPED] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple-] >> test.py::test[join-premap_nonseq_flatmap-off] [SKIPPED] >> test.py::test[join-pullup_null_column-off] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_rename-off] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple-] [SKIPPED] >> test.py::test[ql_filter-integer_single_equals-] [SKIPPED] >> test.py::test[result_types-data-default.txt] >> test.py::test[join-selfjoin_on_sorted_with_rename-off] [SKIPPED] >> test.py::test[join-yql-10654_pullup_with_sys_columns-] >> test.py::test[result_types-data-default.txt] [SKIPPED] >> test.py::test[result_types-singular-default.txt] [SKIPPED] >> test.py::test[column_order-select_groupby_with_star-default.txt] [GOOD] >> test.py::test[count-count_all_grouped-] >> test.py::test[sampling-bind_default-default.txt] [SKIPPED] >> test.py::test[sampling-bind_join_left-default.txt] >> test.py::test[sampling-bind_join_left-default.txt] [SKIPPED] >> test.py::test[schema-select_all_inferschema_limit-] 
[SKIPPED] >> test.py::test[order_by-order_by_tablerow_column-] [GOOD] >> ConsistentIndexRead::InteractiveTx >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts [GOOD] >> test.py::test[schema-select_all_inferschema-extra_field] [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] >> test.py::test[schema-select_all_inferschema_range_empty_fail-] [SKIPPED] >> test.py::test[select-deep_udf_call-] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt] [GOOD] >> test.py::test[ytflow-select_over_static-] [SKIPPED] >> test.py::test[order_by-sort_with_take_limit-] [SKIPPED] >> test.py::test[pg-nulls_native-default.txt] [SKIPPED] >> test.py::test[pg-select_starref2-default.txt] [SKIPPED] >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update >> test.py::test[schema-select_with_map-partial_read_schema] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client9-year Uint64-False] [GOOD] >> test.py::test[pg-select_subquery-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q12-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q34-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q59-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q94-default.txt] >> test.py::test[window-win_func_aggr_with_qualified_all-] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client10-year String NOT NULL-True] >> test.py::test[pg-tpcds-q94-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q22-default.txt] [SKIPPED] >> test.py::test[produce-process_trivial_as_struct-default.txt] >> test.py::test[window-full/aggregations-] [GOOD] >> test.py::test[window-generic/aggregations_after_current-] |98.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/oom/py3test >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] >> test.py::test[type_v3-mergejoin_with_sort-] [GOOD] >> test.py::test[udf-udf-] [SKIPPED] >> test.py::test[weak_field-weak_field_wrong_types_fail-] [SKIPPED] >> test.py::test[window-current/aggregations_leadlag-] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.json-json_each_row] >> test.py::test[key_filter-yql-8663-dedup_ranges-] [GOOD] >> test.py::test[library-package_override-] [SKIPPED] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt] >> test.py::test[aggregate-group_by_full_path-default.txt] [GOOD] >> test.py::test[aggregate-group_by_session_distinct-] >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert >> test.py::test[window-full/leadlag-] [GOOD] >> test.py::test[window-full/session_aliases-] >> test.py::test[join-lookupjoin_inner_1o-] [GOOD] >> test.py::test[join-mapjoin_left_null_column-] >> test.py::test[aggregate-aggregate_udf_nested-] [GOOD] >> test.py::test[aggregate-compare_by-] |98.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part10/py3test >> test.py::test[ytflow-select_over_static-] [SKIPPED] >> test.py::test[join-opt_on_opt_side-] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group-] >> test.py::test[in-in_compact_distinct-] [GOOD] >> test.py::test[insert-after_group_by-default.txt] 
[SKIPPED] >> test.py::test[insert-fail_read_view_after_modify-] [SKIPPED] >> test.py::test[insert-from_erasure_to_none-] [SKIPPED] >> test.py::test[insert-keepmeta_proto_fail-] [SKIPPED] >> test.py::test[insert-literals_to_string-default.txt] [SKIPPED] >> test.py::test[insert_monotonic-keep_meta-default.txt] [SKIPPED] >> test.py::test[schema-select_with_map-partial_read_schema] [GOOD] >> test.py::test[schema-user_schema_append-] [SKIPPED] >> test.py::test[select-backtick_with_escapes-default.txt] >> test.py::test[join-alias_where_group-] >> test.py::test[produce-process_trivial_as_struct-default.txt] [GOOD] >> test.py::test[produce-process_with_udf-default.txt] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values >> test.py::test[window-generic/aggregations_after_current-] [GOOD] >> test.py::test[window-generic/session-] |98.3%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/py3test >> test_ydb_backup.py::TestPermissionsBackupRestoreDontOverwriteOnAlreadyExisting::test_dont_overwrite_on_already_existing >> test.py::test[join-yql-10654_pullup_with_sys_columns-] [GOOD] >> test.py::test[key_filter-contains_optional-] >> oltp_workload::import_test [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client10-year String NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client11-year String-False] >> ydb-tests-olap-column_family-compression::import_test [GOOD] >> test.py::test[window-current/aggregations_leadlag-] [GOOD] >> test.py::test[window-distinct_over_window-] [SKIPPED] >> test.py::test[window-win_func_first_last_with_part-] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple |98.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/import_test >> oltp_workload::import_test [GOOD] |98.3%| [TS] {RESULT} ydb/tests/stress/oltp_workload/import_test >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.json-json_each_row] [GOOD] |98.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/import_test >> ydb-tests-olap-column_family-compression::import_test [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.parquet-parquet] >> test.py::test[count-count_all_grouped-] [GOOD] >> test.py::test[expr-tagged_runtime-default.txt] |98.3%| [TS] {RESULT} ydb/tests/olap/column_family/compression/import_test >> TopicSessionTests::WrongFieldType [GOOD] >> test.py::test[expr-tagged_runtime-default.txt] [SKIPPED] >> test.py::test[expr-yql-10180-default.txt] [SKIPPED] >> test.py::test[window-full/session_aliases-] [GOOD] >> test.py::test[aggregate-group_by_session_distinct-] [GOOD] >> test.py::test[aggregate-group_by_session_only_distinct-] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt] [GOOD] >> test.py::test[limit-yql-9617_empty_lambda-default.txt] >> TopicSessionTests::RestartSessionIfNewClientWithOffset >> test.py::test[hor_join-filters-] >> test.py::test[window-generic/session_aliases-] >> test.py::test[select-backtick_with_escapes-default.txt] [GOOD] >> test.py::test[select-cast_double_to_uint32-default.txt] >> test.py::test[join-alias_where_group-] [GOOD] >> test.py::test[join-anyjoin_common_nodup-] >> 
test.py::test[select-deep_udf_call-] [GOOD] >> test.py::test[select-exists_true-default.txt] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns-] [GOOD] >> test.py::test[window-win_func_rank_by_opt_all-] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data >> test.py::test[join-mapjoin_left_null_column-] [GOOD] >> test.py::test[join-mapjoin_left_null_column-off] >> test.py::test[action-discard-default.txt] [SKIPPED] >> test.py::test[join-mapjoin_left_null_column-off] [SKIPPED] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-] >> test.py::test[action-eval_input_output_table_subquery-] [SKIPPED] >> test.py::test[action-evaluate_match_type-default.txt] [SKIPPED] >> test.py::test[action-evaluate_pure-] >> test.py::test[key_filter-contains_optional-] [GOOD] >> test.py::test[key_filter-mixed_sort-] [SKIPPED] >> test.py::test[key_filter-string_with-default.txt] |98.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] >> test.py::test[produce-process_with_udf-default.txt] [GOOD] >> test.py::test[produce-reduce_all_list-default.txt] >> test.py::test[produce-reduce_all_list-default.txt] [SKIPPED] >> test.py::test[produce-reduce_multi_in_difftype_assume-] [SKIPPED] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap-] [SKIPPED] >> test.py::test[produce-reduce_with_flat_lambda-default.txt] >> test.py::test[window-win_func_first_last_with_part-] [GOOD] >> test.py::test[window-win_multiaggr_library-] [SKIPPED] >> test.py::test[aggregate-compare_by-] [GOOD] >> test.py::test[aggregate-group_by_column-default.txt] >> test.py::test[join-opt_on_opt_side_with_group-] [GOOD] >> test.py::test[window-generic/session-] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt] >> test.py::test[join-premap_common_inner_both_sides-] [SKIPPED] >> test.py::test[join-premap_merge_inner-off] [SKIPPED] >> test.py::test[join-premap_merge_with_remap-off] >> statistics_workload::import_test [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt] [SKIPPED] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client11-year String-False] [GOOD] >> test_workload.py::TestYdbKvWorkload::test[row] [GOOD] >> test_workload.py::TestYdbKvWorkload::test[column] >> test.py::test[join-premap_merge_with_remap-off] [SKIPPED] >> test.py::test[join-pullup_left_semi-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client12-year Utf8-False] >> test.py::test[limit-yql-9617_empty_lambda-default.txt] [GOOD] >> test.py::test[lineage-flatten_list_nested_lambda-] [SKIPPED] >> test.py::test[lineage-join_as_struct-default.txt] [SKIPPED] >> test.py::test[lineage-list_literal2-default.txt] [SKIPPED] >> test.py::test[lineage-pullup_rename-] [SKIPPED] >> test.py::test[lineage-select_field_rename-default.txt] >> test.py::test[window-win_func_rank_by_opt_all-] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.parquet-parquet] [GOOD] 
|98.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/statistics_workload/import_test >> statistics_workload::import_test [GOOD] |98.3%| [TS] {RESULT} ydb/tests/stress/statistics_workload/import_test >> test.py::test[select-exists_true-default.txt] [GOOD] >> test.py::test[select-logical_ops-default.txt] >> test.py::test[aggregate-group_by_session_only_distinct-] [GOOD] >> test.py::test[aggregate-subquery_aggregation-] >> test.py::test[lineage-select_field_rename-default.txt] [SKIPPED] >> test.py::test[lineage-select_nested_table_row-default.txt] [SKIPPED] >> test.py::test[lineage-some_tablerow-default.txt] [SKIPPED] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt] [SKIPPED] >> test.py::test[optimizers-test_lmap_opts-] [SKIPPED] >> test.py::test[optimizers-yql-12620_stage_multiuse-] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names] >> test.py::test[select-cast_double_to_uint32-default.txt] [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt] >> test.py::TestSqsSplitMergeFifoTables::test_fifo_merge_split >> test.py::test[hor_join-filters-] [GOOD] >> test.py::test[hor_join-fuse_multi_outs1-] >> test.py::test[join-anyjoin_common_nodup-] [GOOD] >> test.py::test[join-bush_in_in-] >> test.py::test[hor_join-fuse_multi_outs1-] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs1-outlimit] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_reuse-] [SKIPPED] >> test.py::test[insert-replace_inferred_op-] >> test.py::test[insert-replace_inferred_op-] [SKIPPED] >> test.py::test[join-commonjoin_unused_keys-] >> test.py::test[aggregate-group_by_column-default.txt] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping-] >> test.py::test[key_filter-string_with-default.txt] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt] [SKIPPED] >> test.py::test[lineage-flatten_where-default.txt] >> test.py::test[lineage-flatten_where-default.txt] [SKIPPED] >> test.py::test[lineage-select_table_row-default.txt] [SKIPPED] >> test.py::test[optimizers-direct_row_after_merge-] [SKIPPED] |98.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part7/py3test >> test.py::test[window-win_multiaggr_library-] [SKIPPED] >> test.py::test[optimizers-field_subset_for_multiusage-] [SKIPPED] >> test.py::test[optimizers-keepworld_emptyflatmap-] [SKIPPED] >> test.py::test[action-eval_sample-] >> test.py::test[join-pullup_left_semi-] [GOOD] >> test.py::test[join-yql-16011-] [SKIPPED] >> test.py::test[optimizers-pushdown_nonsep_over_aggregate-] [SKIPPED] >> test.py::test[optimizers-unused_columns_window_no_payloads-] >> test.py::test[json-jsondocument/insert-] [SKIPPED] >> test.py::test[key_filter-complex-default.txt] >> test.py::test[action-eval_sample-] [SKIPPED] >> test.py::test[action-export_action-] >> test.py::test[action-export_action-] [SKIPPED] >> test.py::test[action-insert_each_from_folder-] [SKIPPED] >> test.py::test[action-evaluate_pure-] [GOOD] >> test.py::test[aggr_factory-count-default.txt] [SKIPPED] >> test.py::test[aggregate-agg_filter_pushdown-] >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt] >> test.py::test[select-logical_ops-default.txt] [GOOD] >> test.py::test[optimizers-yql-12620_stage_multiuse-] [GOOD] >> test.py::test[optimizers-yql-17413-topsort-] >> test.py::test[window-generic/session_aliases-] [GOOD] >> test.py::test[window-lagging/aggregations-] >> test.py::test[table_range-concat_with_view-] [SKIPPED] >> 
test.py::test[table_range-range_over_filter_udf-] [SKIPPED] >> test.py::test[table_range-range_over_like-] [SKIPPED] >> test.py::test[table_range-range_over_regexp-] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-] [GOOD] >> test.py::test[join-premap_common_left_cross-] [SKIPPED] >> test.py::test[join-premap_map_semi-] >> test.py::test[produce-reduce_with_flat_lambda-default.txt] [GOOD] >> test.py::test[sampling-reduce-with_premap] [SKIPPED] >> test.py::test[sampling-yql-14664_deps-default.txt] [SKIPPED] >> test.py::test[schema-user_schema_no_infer-] >> test_ydb_backup.py::TestPermissionsBackupRestoreDontOverwriteOnAlreadyExisting::test_dont_overwrite_on_already_existing [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client12-year Utf8-False] [GOOD] >> test.py::test[tpch-q13-default.txt] >> test.py::test[join-premap_map_semi-] [SKIPPED] >> test.py::test[join-premap_no_premap-off] [SKIPPED] >> test.py::test[join-prune_keys-] [SKIPPED] >> test.py::test[join-pullup_context_dep-off] [SKIPPED] >> test.py::test[join-star_join_semionly_premap-off] [SKIPPED] >> test.py::test[join-yql-8131-off] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client13-year Date-False] |98.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part4/py3test >> test.py::test[window-win_func_rank_by_opt_all-] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/py3test >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_PROFILE-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00070f/ydb/tests/fq/plans/test-results/py3test/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00070f/ydb/tests/fq/plans/test-results/py3test/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1510099) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1512050 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt] [GOOD] >> test.py::test[window-win_func_spec_with_part-] >> test_db_counters.py::TestStorageCounters::test_storage_counters[disable_separate_quotas] [GOOD] >> ydb-tests-functional-sqs-with_quotas::import_test [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt] [GOOD] >> test.py::test[select-hits_count-] >> test.py::test[select-hits_count-] [SKIPPED] >> test.py::test[select-one_labeled_column-default.txt] >> test.py::test[join-bush_in_in-] [GOOD] >> test.py::test[join-equi_join_three_asterisk_eval-] >> test.py::test[aggregate-subquery_aggregation-] [GOOD] >> test.py::test[blocks-block_output_various_types-] [SKIPPED] >> test.py::test[blocks-combine_all_avg_filter-] >> ydb-tests-sql-large::import_test [GOOD] |98.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/import_test >> ydb-tests-functional-sqs-with_quotas::import_test [GOOD] |98.4%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/import_test >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names] [GOOD] >> test.py::test[join-commonjoin_unused_keys-] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names] >> test.py::test[join-join_and_distinct_key-off] [SKIPPED] |98.4%| [TM] {RESULT} ydb/tests/fq/plans/py3test >> test.py::test[join-mergejoin_choose_primary-off] [SKIPPED] >> test.py::test[join-mergejoin_force_one_sorted-off] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort-] >> test.py::test[optimizers-unused_columns_window_no_payloads-] [GOOD] >> test.py::test[optimizers-yql-5833-table_content-] >> test.py::test[optimizers-yql-17413-topsort-] [GOOD] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt] [SKIPPED] >> test.py::test[optimizers-yql-2171_aggregate_desc_sort_and_extract-] >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt] [GOOD] >> test.py::test[aggregate-dedup_state_keys-] |98.4%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/sql/large/import_test >> ydb-tests-sql-large::import_test [GOOD] |98.4%| [TS] {RESULT} ydb/tests/sql/large/import_test |98.4%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/py3test >> test.py::test[key_filter-complex-default.txt] [GOOD] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt] >> test.py::test[aggregate-group_by_cube_grouping-] [GOOD] >> test.py::test[aggregate-group_by_expr_lookup-] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000d4b/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000d4b/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1096397 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> ydb-tests-fq-http_api::import_test [GOOD] >> test_db_counters.py::TestStorageCounters::test_storage_counters[enable_separate_quotas] >> test.py::test[aggregate-agg_filter_pushdown-] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping-] >> test.py::test[tpch-q13-default.txt] [GOOD] >> test.py::test[tpch-q14-default.txt] |98.4%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/py3test |98.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/import_test >> ydb-tests-fq-http_api::import_test [GOOD] |98.4%| [TS] {RESULT} ydb/tests/fq/http_api/import_test >> test.py::test[window-lagging/aggregations-] [GOOD] >> test.py::test[window-row_number_to_map-default.txt] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names] [GOOD] >> test.py::test[join-yql-8131-off] [GOOD] >> test.py::test[json-json_exists/example-] >> test.py::test[action-action_eval_cluster_and_table-default.txt] [SKIPPED] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client13-year Date-False] [GOOD] >> test.py::test[schema-user_schema_no_infer-] [GOOD] >> test.py::test[schema-user_schema_patch_columns-] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.json-json_each_row] >> BasicExample::BasicExample >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client0-year Int32 NOT NULL-False] >> test.py::test[action-subquery_merge2-default.txt] [SKIPPED] >> test.py::test[action-subquery_merge_evaluate-default.txt] [SKIPPED] >> test.py::test[action-subquery_merge_nested_subquery-] [SKIPPED] >> test.py::test[agg_apply-avg_const_interval-] [SKIPPED] >> TopicSessionTests::RestartSessionIfNewClientWithOffset [GOOD] >> test.py::test[aggr_factory-corellation-default.txt] [SKIPPED] >> test.py::test[aggr_factory-stddev-default.txt] [SKIPPED] >> 
test.py::test[aggr_factory-top-default.txt] [SKIPPED] >> test.py::test[aggr_factory-udaf-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_gs_and_having-default.txt] >> TopicSessionTests::ReadNonExistentTopic >> test.py::test[join-equi_join_three_asterisk_eval-] [GOOD] >> test.py::test[join-full_join-off] [SKIPPED] >> test.py::test[join-grace_join1-] [SKIPPED] >> test.py::test[join-inner_all-] >> test_query_cache.py::TestQueryCache::test >> test.py::test[join-mergejoin_saves_output_sort-] [GOOD] >> test.py::test[join-premap_common_multiparents-] [SKIPPED] >> test.py::test[join-pullup_cross-off] >> test.py::test[aggregate-dedup_state_keys-] [GOOD] >> test.py::test[aggregate-disable_blocks_with_spilling-] [SKIPPED] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt] >> test.py::test[window-win_func_spec_with_part-] [GOOD] >> test.py::test[window-win_multiaggr_list-default.txt] >> test.py::test[join-pullup_cross-off] [SKIPPED] >> test.py::test[join-star_join_inners_vk_sorted-] >> test.py::test[optimizers-yql-2171_aggregate_desc_sort_and_extract-] [GOOD] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps-] >> test.py::test[optimizers-yql-5833-table_content-] [GOOD] >> test.py::test[order_by-literal_desc-] [SKIPPED] >> BasicExample::BasicExample [GOOD] >> test.py::test[select-one_labeled_column-default.txt] [GOOD] >> test.py::test[select-optional_in_job-] [SKIPPED] >> test.py::test[select-trivial_where-many] >> test.py::test[order_by-union_all-] [SKIPPED] >> test.py::test[pg-join_using_tables4-default.txt] [SKIPPED] >> test.py::test[pg-pg_column_case-] [SKIPPED] >> test.py::test[pg-tpcds-q10-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q29-default.txt] >> test.py::test[pg-tpcds-q29-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q39-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q44-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q72-default.txt] >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues >> test.py::test[blocks-combine_all_avg_filter-] [GOOD] >> test.py::test[blocks-combine_all_max-] >> test.py::test[pg-tpcds-q72-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q75-default.txt] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull_fail-] [SKIPPED] >> test.py::test[produce-process_with_python-default.txt] [SKIPPED] >> test.py::test[produce-reduce_all_list_stream-] [SKIPPED] >> test.py::test[produce-reduce_lambda_list_table-] >> test.py::test[window-row_number_to_map-default.txt] [GOOD] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part_sorted-] >> test.py::test[key_filter-empty_range-] >> test.py::test[aggregate-group_by_expr_lookup-] [GOOD] >> test.py::test[json-json_exists/example-] [GOOD] >> test.py::test[key_filter-empty_range_over_dynamic-] >> test.py::test[aggregate-group_by_expr_only_join-] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] >> ydb-library-benchmarks-template-ut::import_test [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.json-json_each_row] [GOOD] >> test.py::test[window-win_multiaggr_list-default.txt] [GOOD] >> test.py::test[ypath-direct_read_from_dynamic-] [SKIPPED] >> test.py::test[join-star_join_inners_vk_sorted-] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt] [GOOD] >> test.py::test[aggregate-group_by_expr_with_where-default.txt] >> 
test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.parquet-parquet] |98.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] >> test.py::test[join-star_join_inners_vk_sorted-off] [SKIPPED] >> test.py::test[join-yql-14847-off] [SKIPPED] >> test.py::test[key_filter-convert-] >> test.py::test[aggregate-group_by_gs_and_having-default.txt] [GOOD] >> test.py::test[aggregate-group_by_gs_simp-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client0-year Int32 NOT NULL-False] [GOOD] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps-] [GOOD] >> test.py::test[order_by-order_by_tuple_and_member-default.txt] |98.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/template/ut/import_test >> ydb-library-benchmarks-template-ut::import_test [GOOD] >> test.py::test[join-inner_all-] [GOOD] >> test.py::test[join-inner_on_key_only-off] [SKIPPED] >> test.py::test[join-inner_trivial-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client1-year Uint32 NOT NULL-False] >> test.py::test[tpch-q14-default.txt] [GOOD] |98.4%| [TS] {RESULT} ydb/library/benchmarks/template/ut/import_test |98.4%| [TM] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> test.py::test[produce-reduce_lambda_list_table-] [GOOD] >> test.py::test[produce-reduce_lambda_presort_twin-] >> test.py::test[tpch-q8-default.txt] >> test.py::test[select-trivial_where-many] [GOOD] >> test.py::test[simple_columns-simple_columns_base_fail-] >> test.py::test[aggregate-aggrs_no_grouping-] [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio-] >> test.py::test[simple_columns-simple_columns_base_fail-] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt] [SKIPPED] >> test.py::test[table_range-concat_sorted_with_key_diff-] [SKIPPED] >> test.py::test[type_v3-type_subset-] [SKIPPED] >> test.py::test[schema-user_schema_patch_columns-] [GOOD] >> test.py::test[select-create_structures-default.txt] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt] [SKIPPED] >> test.py::test[view-trivial_view_concat-] [SKIPPED] >> test.py::test[weak_field-weak_field_opt-] |98.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> BasicExample::BasicExample [GOOD] |98.5%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> TDqPqRdReadActorTests::Backpressure [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part_sorted-] [GOOD] >> test.py::test[window-win_func_aggr_stat-] |98.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part11/py3test >> test.py::test[ypath-direct_read_from_dynamic-] [SKIPPED] >> TopicSessionTests::ReadNonExistentTopic [GOOD] >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 >> TopicSessionTests::SlowSession >> test_query_cache.py::TestQueryCache::test [GOOD] >> test.py::test[join-inner_trivial-] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by-] >> test.py::test[key_filter-convert-] [GOOD] >> test.py::test[aggregate-group_by_expr_only_join-] [GOOD] >> test.py::test[aggregate-group_by_gs_duo-] >> test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] [GOOD] >> test.py::test[key_filter-lambda_with_null_filter-] >> ydb-tests-fq-mem_alloc::import_test 
[GOOD] >> test.py::test[aggregate-group_by_gs_simp-] [GOOD] >> test.py::test[aggregate-group_by_hop_bad_interval-] [SKIPPED] >> test.py::test[aggregate-group_by_hop_expr_key-] [SKIPPED] >> test.py::test[aggregate-group_by_ru_join-] >> test.py::test[blocks-combine_all_max-] [GOOD] >> test.py::test[blocks-combine_hashed_min-] |98.5%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/py3test >> test.py::test[weak_field-weak_field_opt-] [GOOD] >> test.py::test[window-full/syscolumns-] [SKIPPED] >> test.py::test[window-win_func_first_last_rev-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client1-year Uint32 NOT NULL-False] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] |98.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/import_test >> ydb-tests-fq-mem_alloc::import_test [GOOD] |98.5%| [TS] {RESULT} ydb/tests/fq/mem_alloc/import_test >> test.py::test[order_by-order_by_tuple_and_member-default.txt] [GOOD] >> test.py::test[pg-select_from_columns-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client2-year Uint64 NOT NULL-False] >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio-] [GOOD] >> test.py::test[aggregate-group_by_cube_join_count-] >> test.py::test[key_filter-empty_range_over_dynamic-] [GOOD] >> test.py::test[pg-select_from_columns-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q19-default.txt] >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted >> test.py::test[like-ilike_clause-default.txt] >> test.py::test[pg-tpcds-q19-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q20-default.txt] >> test.py::test[window-win_func_aggr_stat-] [GOOD] >> test.py::test[window-win_func_first_last-] >> test.py::test[pg-tpcds-q20-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q42-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q63-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q73-default.txt] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.parquet-parquet] [GOOD] >> test.py::test[pg-tpcds-q73-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q74-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q80-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q88-default.txt] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names] >> test.py::test[pg-tpcds-q88-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q18-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q21-default.txt] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull-] [SKIPPED] >> test.py::test[produce-process_and_filter-default.txt] [SKIPPED] >> test.py::test[produce-process_rows_sorted_multi_out-] [SKIPPED] >> test.py::test[produce-reduce_all_field_subset-] [SKIPPED] >> test.py::test[produce-reduce_lambda_list_mem-default.txt] >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] >> test.py::test[produce-reduce_lambda_presort_twin-] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple_difftype-] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort-] [SKIPPED] >> test.py::test[produce-reduce_with_python_row-] [SKIPPED] >> test.py::test[sampling-bind_topsort-default.txt] 
[SKIPPED] >> test.py::test[schema-insert_sorted-row_spec] [SKIPPED] >> test.py::test[schema-select_all-row_spec] >> test.py::test[aggregate-group_by_expr_with_where-default.txt] [GOOD] >> test.py::test[aggregate-group_by_gs_with_rollup-] >> test.py::test[blocks-combine_hashed_min-] [GOOD] >> test.py::test[blocks-combine_hashed_set-] >> test.py::test[aggregate-group_by_gs_duo-] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru-] >> test.py::test[key_filter-empty_range-] [GOOD] >> test.py::test[limit-dynamic_limit-] [SKIPPED] >> test.py::test[limit-dynamic_sort_limit-] [SKIPPED] >> test.py::test[limit-empty_read_after_limit-default.txt] [SKIPPED] >> test.py::test[lineage-select_group_by_all-default.txt] [SKIPPED] >> test.py::test[multicluster-remote_tc_with_auto-default.txt] [SKIPPED] >> test.py::test[optimizers-fuse_map_mapreduce-] [SKIPPED] >> test.py::test[optimizers-length_over_merge_fs_multiusage-] >> test.py::test[tpch-q8-default.txt] [GOOD] >> test.py::test[type_v3-singulars-] [SKIPPED] >> test.py::test[udf-python_script_from_file-] [SKIPPED] >> test.py::test[view-file_inner-] [SKIPPED] >> test.py::test[view-view_with_library-] [SKIPPED] >> test.py::test[weak_field-optimize_weak_fields_map-] >> test.py::test[select-create_structures-default.txt] [GOOD] >> test.py::test[select-select_all_from_concat_anon-default.txt] [SKIPPED] >> test.py::test[select-simple_struct_field_access-] >> ydb-tests-olap-load::import_test [GOOD] |98.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/py3test >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] |98.5%| [TS] {RESULT} ydb/tests/library/ut/py3test >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by-] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access-off] [SKIPPED] >> test.py::test[join-join_without_correlation_and_struct_access-off] [SKIPPED] >> test.py::test[join-left_all-off] [SKIPPED] >> test.py::test[window-win_func_first_last_rev-] [GOOD] >> test.py::test[join-left_join_right_pushdown_simple-] >> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions |98.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/load/import_test >> ydb-tests-olap-load::import_test [GOOD] |98.5%| [TS] {RESULT} ydb/tests/olap/load/import_test >> test.py::test[like-ilike_clause-default.txt] [GOOD] >> test.py::test[limit-limit-] >> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions [GOOD] >> alter_compression.py::TestAlterCompression::test_all_supported_compression >> test.py::test[window-win_func_first_last-] [GOOD] >> test.py::test[window-win_func_lead_lag_worm-] >> test.py::test[limit-limit-] [SKIPPED] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] [GOOD] >> test.py::test[limit-yql-8046_empty_sorted_desc-] [SKIPPED] >> test.py::test[aggregate-group_by_cube_join_count-] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten-default.txt] >> test.py::test[lineage-group_by_asstruct_key-default.txt] [SKIPPED] >> TDqPqRdReadActorTests::MetadataFields >> test.py::test[lineage-list_literal4-default.txt] [SKIPPED] >> test.py::test[lineage-select_all-default.txt] [SKIPPED] >> test.py::test[lineage-select_field_filter-default.txt] [SKIPPED] >> test.py::test[lineage-select_field_limit_offset-default.txt] [SKIPPED] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other-] [SKIPPED] >> 
test.py::test[order_by-SortByOneFieldDesc-] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] >> kqprun_recipe::import_test [GOOD] >> TDqPqRdReadActorTests::MetadataFields [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] [GOOD] |98.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/recipe/import_test >> kqprun_recipe::import_test [GOOD] |98.5%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/import_test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_tx_coordinator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_allocator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState >> test.py::test[key_filter-lambda_with_null_filter-] [GOOD] >> test.py::test[key_filter-yql_5895_or-default.txt] |98.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part1/py3test >> test.py::test[window-win_func_first_last_rev-] [GOOD] >> test.py::test[aggregate-group_by_gs_with_rollup-] [GOOD] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client2-year Uint64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client3-year Date NOT NULL-False] >> test.py::test[aggregate-group_by_ru_join-] [GOOD] >> test.py::test[schema-select_all-row_spec] [GOOD] >> test.py::test[schema-select_all_forceinferschema-] >> test.py::test[aggregate-group_by_ru_join_star-default.txt] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names] [GOOD] >> test.py::test[optimizers-length_over_merge_fs_multiusage-] [GOOD] >> test.py::test[optimizers-unused_columns_group_one_of_multi-] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names] >> test.py::test[weak_field-optimize_weak_fields_map-] [GOOD] >> test.py::test[window-rank/plain-] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows >> test.py::test[select-simple_struct_field_access-] [GOOD] >> test.py::test[join-left_join_right_pushdown_simple-] [GOOD] >> test.py::test[join-left_only_semi_and_other-] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt] |98.6%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/py3test >> test.py::test[aggregate-group_by_mul_gs_ru-] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind-] >> test.py::test[produce-reduce_lambda_list_mem-default.txt] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype-] [SKIPPED] >> test.py::test[produce-reduce_with_trivial_remaps-] [SKIPPED] >> test.py::test[sampling-insert-] [SKIPPED] >> test.py::test[sampling-join_right_sample-default.txt] [SKIPPED] >> test.py::test[sampling-mapjoin_left_sample-default.txt] [SKIPPED] >> test.py::test[sampling-subquery_default-default.txt] [SKIPPED] >> test.py::test[sampling-subquery_mapjoin-default.txt] [SKIPPED] >> test.py::test[schema-copy-yamred_dsv_raw] [SKIPPED] >> test.py::test[schema-select_all-schema] >> test_ydb_backup.py::TestPermissionsBackupRestoreSchemeOnly::test_scheme_only >> test.py::test[order_by-SortByOneFieldDesc-] [GOOD] >> 
test.py::test[order_by-assume_over_input-] [SKIPPED] >> test.py::test[order_by-order_by_tablepath_column-] >> test.py::test[blocks-combine_hashed_set-] [GOOD] >> test_encryption.py::TestEncryption::test_simple_encryption >> TopicSessionTests::SlowSession [GOOD] >> olap_workload::import_test [GOOD] >> test.py::test[blocks-date_greater_or_equal-] >> TopicSessionTests::TwoSessionsWithDifferentSchemes >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] >> test.py::test[window-win_func_lead_lag_worm-] [GOOD] >> test.py::test[window-win_func_over_group_by_compl-] |98.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/import_test >> olap_workload::import_test [GOOD] |98.6%| [TS] {RESULT} ydb/tests/stress/olap_workload/import_test >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage-] >> test.py::test[key_filter-yql_5895_or-default.txt] [GOOD] >> test.py::test[limit-empty_input_after_limit-default.txt] >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState [GOOD] >> TDqPqReadActorTest::TestReadFromTopic >> test.py::test[optimizers-unused_columns_group_one_of_multi-] [GOOD] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt] >> test.py::test[aggregate-group_by_gs_flatten-default.txt] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names-] >> test.py::test[schema-select_all-schema] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client3-year Date NOT NULL-False] [GOOD] >> test.py::test[schema-select_all_inferschema_range-] [SKIPPED] >> test.py::test[schema-select_yamr_fields-] [SKIPPED] >> test.py::test[schema-skip_complex_type2-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client4-year Utf8 NOT NULL-False] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names] [GOOD] >> test.py::test[schema-select_all_forceinferschema-] [GOOD] >> test.py::test[schema-select_all_inferschema_op-] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.json-json_each_row] >> test.py::test[aggregate-group_by_ru_join_star-default.txt] [GOOD] >> test.py::test[aggregate-group_by_session_star-] >> test.py::test[window-rank/plain-] [GOOD] >> test.py::test[window-row_number_to_map_noncompact-default.txt] >> test.py::test[order_by-order_by_tablepath_column-] [GOOD] >> test.py::test[pg-tpcds-q07-default.txt] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt] [GOOD] >> test.py::test[pg-tpcds-q07-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q21-default.txt] [SKIPPED] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt] [SKIPPED] >> test.py::test[tpch-q17-default.txt] >> test.py::test[pg-tpcds-q26-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q36-default.txt] >> test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] >> test.py::test[blocks-date_greater_or_equal-] [GOOD] >> test.py::test[blocks-date_top_sort-] [SKIPPED] >> test.py::test[blocks-distinct_opt_state_all-] >> test.py::test[pg-tpcds-q36-default.txt] [SKIPPED] >> test.py::test[produce-reduce_with_assume_in_subquery-] >> 
ydb-tests-fq-common::import_test [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] >> test_http_api.py::TestHttpApi::test_simple_analytics_query >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind-] [GOOD] >> test.py::test[aggregate-group_by_with_udf_by_aggregate-] [SKIPPED] >> test.py::test[aggregate-native_desc_group_compact_by-] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-on] [SKIPPED] >> test.py::test[bigdate-tz_table_fill-] [SKIPPED] >> test.py::test[blocks-block_input_various_types_2-v3] [SKIPPED] >> test.py::test[blocks-date_less-] >> test.py::test[join-left_only_semi_and_other-] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage-] [GOOD] >> test.py::test[join-mapjoin_dup_key-] >> test.py::test[aggregate-percentiles_ungrouped-] |98.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/common/import_test >> ydb-tests-fq-common::import_test [GOOD] |98.6%| [TS] {RESULT} ydb/tests/fq/common/import_test >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt] [GOOD] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props-] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props-] [SKIPPED] >> test.py::test[order_by-SortByOneField-] >> test.py::test[limit-empty_input_after_limit-default.txt] [GOOD] >> test.py::test[lineage-member_over_if_struct-default.txt] [SKIPPED] >> test.py::test[window-win_func_over_group_by_compl-] [GOOD] >> test.py::test[lineage-reduce-default.txt] [SKIPPED] >> test.py::test[window-win_func_rank_by_opt_part-] >> test.py::test[lineage-select_union_all-default.txt] [SKIPPED] >> test.py::test[lineage-unordered_subquery-default.txt] [SKIPPED] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt] [SKIPPED] >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill-] [SKIPPED] >> test.py::test[order_by-literal_with_assume_desc-] [SKIPPED] >> test.py::test[order_by-order_by_expr-] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names-] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_reuse-] >> test.py::test[schema-select_all_inferschema_op-] [GOOD] >> test.py::test[schema-select_operate_with_columns_simple-default.txt] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.json-json_each_row] [GOOD] >> test.py::test[schema-skip_complex_type2-] [GOOD] >> test.py::test[schema-user_schema_bind-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client4-year Utf8 NOT NULL-False] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.parquet-parquet] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client5-year Int64 NOT NULL-False] >> test_ydb_backup.py::TestPermissionsBackupRestoreSchemeOnly::test_scheme_only [GOOD] >> test.py::test[window-row_number_to_map_noncompact-default.txt] [GOOD] >> test.py::test[window-win_by_all_aggregate-] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> test.py::test[aggregate-group_by_session_star-] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct-] >> TopicSessionTests::TwoSessionsWithDifferentSchemes [GOOD] >> TDqPqReadActorTest::TestReadFromTopic [GOOD] >> TDqPqReadActorTest::TestReadFromTopicFromNow >> 
test.py::test[aggr_factory-boolor-default.txt] [SKIPPED] >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] >> test.py::test[aggregate-GroupByOneField-] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows [GOOD] >> test.py::test[produce-reduce_with_assume_in_subquery-] [GOOD] >> test.py::test[ql_filter-integer_select_other-] [SKIPPED] >> test.py::test[sampling-orderedjoin_left_sample-default.txt] [SKIPPED] >> test.py::test[order_by-order_by_expr-] [GOOD] >> test.py::test[pg-doubles_search_path-default.txt] >> test.py::test[sampling-reduce-] [SKIPPED] >> test.py::test[schema-skip_complex_type-] >> test.py::test[pg-doubles_search_path-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q05-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q15-default.txt] >> test.py::test[pg-tpcds-q15-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q33-default.txt] [SKIPPED] >> test.py::test[join-mapjoin_dup_key-] [GOOD] >> test.py::test[join-premap_common_cross-] [SKIPPED] >> test.py::test[join-premap_common_semi-off] [SKIPPED] >> test.py::test[join-pullup_exclusion-off] >> test.py::test[order_by-SortByOneField-] [GOOD] >> test.py::test[order_by-assume_with_transform_desc-] [SKIPPED] >> test.py::test[order_by-native_desc_sort_with_limit-] [SKIPPED] >> test.py::test[order_by-order_by_tuple_expr-default.txt] >> test.py::test[aggregate-percentiles_ungrouped-] [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt] >> test.py::test[window-win_func_rank_by_opt_part-] [GOOD] >> test.py::test[window-win_func_rank_with_order_by_aggr_key-] >> test.py::test[blocks-date_less-] [GOOD] >> test.py::test[column_group-hint_anon_groups-single] [SKIPPED] >> test.py::test[column_group-hint_non_str_yson_fail-] [SKIPPED] >> test.py::test[column_order-select_where-default.txt] >> test.py::test[pg-tpcds-q41-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q60-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q93-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q14-default.txt] [SKIPPED] >> test.py::test[produce-reduce_multi_in_presort-] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys-] [SKIPPED] >> test.py::test[produce-reduce_with_python_filter_and_having-] >> test.py::test[blocks-distinct_opt_state_all-] [GOOD] >> test.py::test[blocks-pg_to_numbers-] >> test.py::test[join-pullup_exclusion-off] [SKIPPED] >> test.py::test[join-pullup_left-] >> test.py::test[produce-reduce_with_python_filter_and_having-] [SKIPPED] >> test.py::test[ql_filter-integer_bounds-] [SKIPPED] >> test.py::test[ql_filter-integer_many_noskiff-] [SKIPPED] >> test.py::test[schema-def_values-] >> test.py::test[schema-user_schema_bind-default.txt] [GOOD] >> test.py::test[schema-user_schema_with_sort-] >> test.py::test[schema-select_operate_with_columns_simple-default.txt] [GOOD] >> test.py::test[select-column_labels-default.txt] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client5-year Int64 NOT NULL-False] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_reuse-] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping-] >> test.py::test[tpch-q17-default.txt] [GOOD] >> test.py::test[tpch-q18-default.txt] >> 
test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client6-year Int32-False] >> test_http_api.py::TestHttpApi::test_simple_analytics_query [GOOD] >> test_http_api.py::TestHttpApi::test_empty_query [GOOD] >> test_http_api.py::TestHttpApi::test_warning >> test.py::test[aggregate-GroupByOneField-] [GOOD] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt] >> test.py::test[ansi_idents-escaped_udf_name-default.txt] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt] [SKIPPED] >> test.py::test[join-pullup_left-] [GOOD] >> test.py::test[join-pullup_rownumber-off] [SKIPPED] >> test.py::test[join-yql-12022-off] [SKIPPED] >> test.py::test[join-yql-14829_leftonly-off] [SKIPPED] >> test.py::test[key_filter-string_with_legacy-] >> test.py::test[binding-table_from_binding_inferscheme-default.txt] >> test.py::test[window-win_by_all_aggregate-] [GOOD] >> test.py::test[window-win_with_cur_row-] >> test.py::test[schema-def_values-] [GOOD] >> test.py::test[schema-insert_sorted-read_schema] [SKIPPED] >> test.py::test[schema-user_schema_mix2-] >> TDqPqReadActorTest::TestReadFromTopicFromNow [GOOD] >> test.py::test[blocks-pg_to_numbers-] [GOOD] >> test.py::test[column_order-select_where-default.txt] [GOOD] >> test.py::test[dq-pool_trees_whitelist-] [SKIPPED] >> test.py::test[epochs-use_sorted_by_complex_type-] >> test.py::test[blocks-top_sort_one_asc-] >> nemesis::import_test [GOOD] >> TDqPqReadActorTest::ReadWithFreeSpace >> test.py::test[epochs-use_sorted_by_complex_type-] [SKIPPED] >> test.py::test[epochs-write_and_use_in_same_epoch-] [SKIPPED] >> test.py::test[file-file_list_simple-] [SKIPPED] >> test.py::test[file-where_key_in_file_content_typed-] [SKIPPED] >> test.py::test[flatten_by-flatten_with_join-] >> test.py::test[select-column_labels-default.txt] [GOOD] >> test.py::test[select-corr_name_in_select_seq-default.txt] >> test.py::test[aggregate-group_compact_sorted_distinct-] [GOOD] >> test.py::test[aggregate-library_error_in_aggregation_fail-] [SKIPPED] >> test.py::test[bigdate-table_explicit_cast-default.txt] >> test.py::test[order_by-order_by_tuple_expr-default.txt] [GOOD] >> test.py::test[schema-skip_complex_type-] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt] [SKIPPED] >> test.py::test[pg-in_mixed-] [SKIPPED] >> test.py::test[schema-yamred_dsv_select_from_dict-] [SKIPPED] >> test.py::test[pg-tpcds-q14-default.txt] >> test.py::test[select-anon_clash-] [SKIPPED] >> test.py::test[select-bit_ops-default.txt] >> test_http_api.py::TestHttpApi::test_warning [GOOD] >> test_http_api.py::TestHttpApi::test_get_unknown_query [GOOD] >> test.py::test[pg-tpcds-q14-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q25-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q61-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q92-default.txt] |98.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/driver/import_test >> nemesis::import_test [GOOD] |98.6%| [TS] {RESULT} ydb/tests/tools/nemesis/driver/import_test >> test_http_api.py::TestHttpApi::test_unauthenticated [GOOD] >> test_http_api.py::TestHttpApi::test_create_idempotency >> test.py::test[pg-tpcds-q92-default.txt] [SKIPPED] >> test.py::test[produce-process_multi_in_trivial_lambda-] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names] [GOOD] >> test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] [GOOD] >> test.py::test[schema-user_schema_with_sort-] [GOOD] >> 
test.py::test[select-dict_lookup_column_names-default.txt] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt] [GOOD] >> test.py::test[aggregate-group_by_hop_list_key-] [SKIPPED] >> test.py::test[aggregate-group_by_mul_gs_gs-] >> test.py::test[window-win_func_rank_with_order_by_aggr_key-] [GOOD] >> test.py::test[window-win_lead_in_mem-default.txt] [SKIPPED] >> test.py::test[ypath-limit_with_range-default.txt] |98.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/py3test >> test_query_cache.py::TestQueryCache::test [GOOD] |98.6%| [TM] {RESULT} ydb/tests/functional/query_cache/py3test >> test.py::test[binding-table_from_binding_inferscheme-default.txt] [GOOD] >> test.py::test[binding-table_regexp_binding-] [SKIPPED] >> test.py::test[blocks-add_uint16-] >> test.py::test[ypath-limit_with_range-default.txt] [SKIPPED] >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes [GOOD] >> test_http_api.py::TestHttpApi::test_create_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_stop_idempotency >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client6-year Int32-False] [GOOD] >> test.py::test[schema-user_schema_mix2-] [GOOD] >> TopicSessionTests::RestartSessionIfQueryStopped >> test.py::test[select-calculated_values-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client7-year Uint32-False] >> test.py::test[aggregate-group_by_rollup_grouping-] [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg-] |98.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/py3test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] |98.6%| [TM] {RESULT} ydb/tests/functional/scheme_tests/py3test >> test.py::test[tpch-q18-default.txt] [GOOD] >> test.py::test[tpch-q3-default.txt] >> test_alloc_default.py::TestAlloc::test_up_down[kikimr0] >> test.py::test_wait_for_cluster_ready [GOOD] >> test.py::test_counter >> test.py::test[flatten_by-flatten_with_join-] [GOOD] >> test.py::test[hor_join-double_input-default.txt] >> test.py::test[select-corr_name_in_select_seq-default.txt] [GOOD] >> test.py::test[select-sampleselect-] >> test.py::test[produce-process_multi_in_trivial_lambda-] [GOOD] >> test.py::test[produce-reduce_all_opt-default.txt] [SKIPPED] >> test.py::test[produce-reduce_multi_in_keytuple-] [SKIPPED] >> test.py::test[produce-reduce_multi_in_ref-] [SKIPPED] >> test.py::test[produce-reduce_subfields-sorted] [SKIPPED] >> test.py::test[ql_filter-integer_optional-] [SKIPPED] >> test.py::test[sampling-map-] [SKIPPED] >> test.py::test[schema-append_to_desc-] [SKIPPED] >> test.py::test[schema-select_all-yamred_dsv_raw] [SKIPPED] >> test.py::test[schema-select_all_inferschema2-] |98.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part14/py3test >> test.py::test[ypath-limit_with_range-default.txt] [SKIPPED] >> TDqPqReadActorTest::ReadWithFreeSpace [GOOD] >> TDqPqReadActorTest::ReadNonExistentTopic [GOOD] |98.7%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/py3test >> ydb-tests-functional-clickbench::import_test [GOOD] >> test.py::test[select-dict_lookup_column_names-default.txt] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt] >> 
TDqPqReadActorTest::TestSaveLoadPqRead >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names] [GOOD] >> test.py::test_counter [GOOD] >> test.py::test_viewer_nodes >> test.py::test[key_filter-string_with_legacy-] [GOOD] >> test.py::test[lambda-lambda_simple-default.txt] |98.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/import_test >> ydb-tests-functional-clickbench::import_test [GOOD] |98.7%| [TS] {RESULT} ydb/tests/functional/clickbench/import_test >> test.py::test[blocks-top_sort_one_asc-] [GOOD] >> test.py::test[column_group-hint_append_fail-diff_grp] [SKIPPED] >> test.py::test[column_order-select_win_func-default.txt] >> test.py::test[aggregate-group_by_mul_gs_gs-] [GOOD] >> test.py::test[aggregate-group_compact_sorted_with_diff_order-] [SKIPPED] >> test.py::test_viewer_nodes [GOOD] >> test.py::test_storage_groups [GOOD] >> test.py::test_viewer_sysinfo [GOOD] >> test.py::test_viewer_vdiskinfo [GOOD] >> test.py::test_viewer_pdiskinfo [GOOD] >> test.py::test_viewer_bsgroupinfo [GOOD] >> test.py::test[window-win_with_cur_row-] [GOOD] >> test.py::test[window-yql-15636-default.txt] [SKIPPED] >> test.py::test[ytflow-select-] [SKIPPED] >> test.py::test[ansi_idents-join_using-default.txt] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.json-json_each_row] >> test.py::test_viewer_tabletinfo >> test.py::test[select-calculated_values-default.txt] [GOOD] >> test.py::test[select-discard-default.txt] [SKIPPED] >> test.py::test[select-table_content_from_double_opt-default.txt] [SKIPPED] >> test.py::test[select-unlabeled_1000-] >> test.py::test_viewer_tabletinfo [GOOD] >> test.py::test_viewer_describe [GOOD] >> test.py::test[select-bit_ops-default.txt] [GOOD] >> test.py::test[select-if-default.txt] >> ydb-tests-functional-minidumps::import_test [GOOD] >> test.py::test_viewer_cluster [GOOD] >> test.py::test_viewer_tenantinfo [GOOD] >> test.py::test_viewer_tenantinfo_db >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client7-year Uint32-False] [GOOD] >> test.py::test_viewer_tenantinfo_db [GOOD] >> test.py::test_viewer_healthcheck >> test.py::test[hor_join-double_input-default.txt] [GOOD] >> test.py::test[hor_join-out_range-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client8-year Int64-False] |98.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/minidumps/import_test >> ydb-tests-functional-minidumps::import_test [GOOD] |98.7%| [TS] {RESULT} ydb/tests/functional/minidumps/import_test >> test.py::test_viewer_healthcheck [GOOD] >> test.py::test_viewer_acl >> test.py::test[bigdate-table_explicit_cast-default.txt] [GOOD] >> test.py::test[bigdate-table_yt_native-default] [SKIPPED] >> test.py::test[bigdate-tz_table_rw-] [SKIPPED] >> test.py::test[binding-bind_select-default.txt] >> test.py::test[select-sampleselect-] [GOOD] >> test.py::test[table_range-each_with_non_existing-] [SKIPPED] >> test.py::test[tpch-q1-default.txt] >> test.py::test_viewer_acl [GOOD] >> test.py::test_viewer_autocomplete [GOOD] >> test.py::test_viewer_check_access >> test.py::test_viewer_check_access [GOOD] >> test.py::test_viewer_query >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg-] [GOOD] >> test.py::test[aggregate-rollup_with_dict-] >> test.py::test[blocks-add_uint16-] [GOOD] >> 
test.py::test[blocks-coalesce_complex-default.txt] [SKIPPED] >> test.py::test[blocks-combine_all_some-] |98.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part3/py3test >> test.py::test[ytflow-select-] [SKIPPED] >> test.py::test_viewer_query [GOOD] >> test.py::test_viewer_query_issue_13757 >> test.py::test_viewer_query_issue_13757 [GOOD] >> test.py::test_viewer_query_issue_13945 >> test.py::test[tpch-q3-default.txt] [GOOD] >> test.py::test[tpch-q4-default.txt] [SKIPPED] >> test.py::test[type_v3-decimal_yt_llvm-] [SKIPPED] >> test.py::test[type_v3-insert_struct_v3_wo_native-] [SKIPPED] >> test.py::test[udf-udaf-] [SKIPPED] >> test.py::test[weak_field-weak_field_esc_string-] >> test_http_api.py::TestHttpApi::test_stop_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_restart_idempotency >> test.py::test_viewer_query_issue_13945 [GOOD] >> test.py::test_pqrb_tablet |98.7%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/py3test >> test.py::test_pqrb_tablet [GOOD] >> test.py::test_viewer_nodes_issue_14992 >> test.py::test[schema-select_all_inferschema2-] [GOOD] |98.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] |98.7%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test >> test.py::test_viewer_nodes_issue_14992 [GOOD] >> test.py::test_operations_list [GOOD] >> test.py::test_operations_list_page [GOOD] >> test.py::test_operations_list_page_bad [GOOD] >> test.py::test_scheme_directory >> test.py::test[schema-select_reordered-default.txt] >> test.py::test_scheme_directory [GOOD] >> test.py::test_topic_data >> test.py::test[select-dot_name_subrequest-default.txt] [GOOD] >> test.py::test[select-host_count-] [SKIPPED] >> test.py::test[select-struct_members-default.txt] |98.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] |98.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.parquet-parquet] >> test.py::test[hor_join-out_range-default.txt] [GOOD] >> test.py::test[hor_join-runtime_dep-default.txt] [SKIPPED] >> test.py::test[in-in_enum_single0-default.txt] >> test.py::test[binding-bind_select-default.txt] [GOOD] >> test.py::test[binding-insert_binding-] [SKIPPED] >> test.py::test[blocks-date_less_scalar-] >> test.py::test[select-if-default.txt] [GOOD] >> test.py::test[select-missing_with_nonpersist-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client8-year Int64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client9-year Uint64-False] >> test.py::test[select-unlabeled_1000-] [GOOD] >> test.py::test[select-where_in-default.txt] >> test.py::test[lambda-lambda_simple-default.txt] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt] >> test.py::test[ansi_idents-join_using-default.txt] [GOOD] >> test.py::test[bigdate-table_common_type-default.txt] >> test.py::test[tpch-q1-default.txt] 
[GOOD] >> test.py::test[type_v3-ignore_v3_pragma-] >> test.py::test[weak_field-weak_field_esc_string-] [GOOD] >> test.py::test[weak_field-weak_field_join_condition-] >> ydb-tests-functional-rename::import_test [GOOD] >> test.py::test_topic_data [GOOD] >> test.py::test_transfer_describe >> test.py::test[column_order-select_win_func-default.txt] [GOOD] >> test.py::test[column_order-union_all_positional_unordered_fail-] [SKIPPED] >> test.py::test[count-boolean_count-] |98.7%| [TA] $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} >> test.py::test_transfer_describe [GOOD] >> test.py::test[aggregate-rollup_with_dict-] [GOOD] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt] |98.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/import_test >> ydb-tests-functional-rename::import_test [GOOD] |98.8%| [TS] {RESULT} ydb/tests/functional/rename/import_test |98.8%| [TA] {RESULT} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} >> test.py::test[blocks-combine_all_some-] [GOOD] >> test.py::test[blocks-combine_hashed_max-] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.parquet-parquet] [GOOD] >> TDqSolomonWriteActorTest::TestWriteFormat >> SdkCredProvider::PingFromProviderSyncDiscovery >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_posix/big.csv-csv_with_names-POSIX] >> test.py::test[select-struct_members-default.txt] [GOOD] >> test.py::test[select-trivial_where-one] >> test_http_api.py::TestHttpApi::test_restart_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_simple_streaming_query >> test.py::test[in-in_enum_single0-default.txt] [GOOD] >> test.py::test[insert-drop_sortness-calc] [SKIPPED] >> test.py::test[insert-override-from_sorted] [SKIPPED] >> test.py::test[insert-select_subquery-] [SKIPPED] >> test.py::test[join-anyjoin_common_nodata_keys-off] [SKIPPED] >> test.py::test[join-anyjoin_common_nodup-off] [SKIPPED] >> test.py::test[join-cbo_4tables_only_sorted_merge-] [SKIPPED] >> test.py::test[join-inner_trivial-off] [SKIPPED] >> test.py::test[join-join_right_cbo-] [SKIPPED] >> test.py::test[join-join_without_column-off] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_subst-off] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o-] >> test.py::test[select-missing_with_nonpersist-] [GOOD] >> test.py::test[select-optional_pull-] [SKIPPED] >> test.py::test[select-type_assert-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client9-year Uint64-False] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt] [GOOD] >> test.py::test[lineage-select_field-default.txt] [SKIPPED] >> test.py::test[lineage-window_one-default.txt] [SKIPPED] >> test.py::test[optimizers-combinebykey_fields_subset-] |98.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/tests/py3test >> test.py::test_transfer_describe [GOOD] |98.8%| [TM] {RESULT} ydb/core/viewer/tests/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client10-year String NOT NULL-True] >> test_http_api.py::TestHttpApi::test_simple_streaming_query [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results >> test.py::test[schema-select_reordered-default.txt] [GOOD] >> test.py::test[schema-user_schema_mix1-] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] >> 
test.py::test[blocks-date_less_scalar-] [GOOD] >> test.py::test[blocks-date_sub-] >> test.py::test[type_v3-ignore_v3_pragma-] [GOOD] >> test.py::test[bigdate-table_common_type-default.txt] [GOOD] >> test.py::test[bigdate-tz_table_yt_key_filter-] [SKIPPED] >> test.py::test[blocks-combine_all_max_filter_opt-] >> test.py::test[udf-named_args_for_script_with_posargs_reuse_args_fail-] [SKIPPED] >> test.py::test[udf-udaf_lambda-default.txt] >> test.py::test[weak_field-weak_field_join_condition-] [GOOD] >> test.py::test[weak_field-yql-7888_mapfieldsubset-] [SKIPPED] >> test.py::test[window-current/ansi_current_mixed-] >> test.py::test[select-where_in-default.txt] [GOOD] >> test.py::test[select-where_with_lambda-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client10-year String NOT NULL-True] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_posix/big.csv-csv_with_names-POSIX] [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results [GOOD] >> test_http_api.py::TestHttpApi::test_optional_results >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client11-year String-False] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] >> test_ydb_backup.py::TestPermissionsBackupRestoreEmptyDir::test_empty_dir >> test.py::test[blocks-combine_hashed_max-] [GOOD] >> test.py::test[blocks-combine_hashed_pg-] >> test.py::test[select-trivial_where-one] [GOOD] >> test.py::test[select-unlabeled-] >> test.py::test[blocks-combine_all_count-] >> SdkCredProvider::PingFromProviderSyncDiscovery [GOOD] >> SdkCredProvider::PingFromProviderAsyncDiscovery >> test_alloc_default.py::TestAlloc::test_up_down[kikimr0] [GOOD] >> test.py::test[schema-user_schema_mix1-] [GOOD] >> test.py::test[select-from_in_front_sub-default.txt] >> test.py::test[count-boolean_count-] [GOOD] >> test.py::test[distinct-distinct_count_and_avg-default.txt] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] >> test_http_api.py::TestHttpApi::test_optional_results [GOOD] >> test_http_api.py::TestHttpApi::test_pg_results >> test.py::test[join-lookupjoin_inner_2o-] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq-] >> test.py::test[optimizers-combinebykey_fields_subset-] [GOOD] >> test.py::test[optimizers-yql-6008_limit_after_map-] [SKIPPED] >> test.py::test[optimizers-yql-9297_publish_ytcopy-] [SKIPPED] >> test.py::test[order_by-native_desc_sort-] [SKIPPED] >> test.py::test[order_by-order_by_expr_simple-] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] [GOOD] >> test.py::test[udf-udaf_lambda-default.txt] [GOOD] >> test.py::test[window-current/session-] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-date_time/simple_iso/big.csv-csv_with_names-ISO] >> test.py::test[select-unlabeled-] [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt] [SKIPPED] >> test.py::test[simple_columns-simple_columns_base-default.txt] >> test.py::test[blocks-combine_hashed_pg-] [GOOD] >> test.py::test[blocks-interval_div-] >> test.py::test[window-current/ansi_current_mixed-] [GOOD] >> test.py::test[select-type_assert-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_all-default.txt] >> 
test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client11-year String-False] [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client12-year Utf8-False] >> test_http_api.py::TestHttpApi::test_pg_results [GOOD] >> test_http_api.py::TestHttpApi::test_set_result >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] >> test.py::test[select-from_in_front_sub-default.txt] [GOOD] >> test.py::test[select-opt_list_access-default.txt] >> TopicSessionTests::RestartSessionIfQueryStopped [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] >> RowDispatcherTests::OneClientOneSession [GOOD] >> RowDispatcherTests::TwoClientOneSession [GOOD] >> RowDispatcherTests::SessionError [GOOD] >> RowDispatcherTests::CoordinatorSubscribe >> s3_recipe::import_test [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt-] [GOOD] >> test.py::test[blocks-combine_all_min_filter-] >> RowDispatcherTests::CoordinatorSubscribe [GOOD] >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged [GOOD] >> test.py::test[blocks-date_sub-] [GOOD] >> test.py::test[blocks-div_uint64-] >> RowDispatcherTests::TwoClients4Sessions [GOOD] >> RowDispatcherTests::ReinitConsumerIfNewGeneration [GOOD] >> RowDispatcherTests::HandleTEvUndelivered [GOOD] >> RowDispatcherTests::TwoClientTwoConnection [GOOD] >> RowDispatcherTests::ProcessNoSession [GOOD] >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] |98.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/s3_recipe/import_test >> s3_recipe::import_test [GOOD] |98.8%| [TS] {RESULT} ydb/tests/tools/s3_recipe/import_test >> test_http_api.py::TestHttpApi::test_set_result [GOOD] >> test.py::test[blocks-combine_all_count-] [GOOD] >> test.py::test[blocks-filter_by_column_with_drop-] >> test.py::test[distinct-distinct_count_and_avg-default.txt] [GOOD] >> test.py::test[distinct-distinct_star-default.txt] [SKIPPED] >> test.py::test[distinct-distinct_star1-] >> test_http_api.py::TestHttpApi::test_complex_results >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] >> test.py::test[select-where_with_lambda-] [GOOD] >> test.py::test[stream_lookup_join-lookup_join-default.txt] [SKIPPED] >> test.py::test[type_v3-float-] [SKIPPED] >> test.py::test[type_v3-insert_struct_v3_with_native-] [SKIPPED] >> test.py::test[udf-udaf_distinct-] [SKIPPED] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-date_time/simple_iso/big.csv-csv_with_names-ISO] [GOOD] >> test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] >> test.py::test[view-secure_eval_dyn-] [SKIPPED] >> test.py::test[weak_field-weak_field_join_where-] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_posix/big.csv-csv_with_names-POSIX] >> test.py::test[simple_columns-simple_columns_base-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt] [SKIPPED] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt] >> test.py::test[window-current/session-] [GOOD] >> test.py::test[window-full/aggregations_leadlag-] >> 
test.py::test[order_by-order_by_expr_simple-] [GOOD] >> test.py::test[order_by-yql-19598-] [SKIPPED] >> test.py::test[pg-drop_table-] [SKIPPED] >> test.py::test[pg-join_using_tables1-default.txt] [SKIPPED] >> test.py::test[pg-point-default.txt] [SKIPPED] >> test.py::test[pg-select_from_columns_star-default.txt] [SKIPPED] >> test.py::test[pg-select_limit-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q02-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q46-default.txt] >> test.py::test[pg-tpcds-q46-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q03-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q13-default.txt] [SKIPPED] >> test.py::test[produce-process_rows_and_filter-] [SKIPPED] >> test.py::test[produce-reduce_all-default.txt] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] >> test_ydb_backup.py::TestPermissionsBackupRestoreEmptyDir::test_empty_dir [GOOD] >> test.py::test[produce-reduce_all-default.txt] [SKIPPED] >> test.py::test[produce-reduce_subfields-] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys_stream-] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort_stream-] [SKIPPED] >> test.py::test[sampling-bind_expr_subquery-default.txt] >> test.py::test[select-opt_list_access-default.txt] [GOOD] >> test.py::test[select-uncorrelated_subqueries-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client12-year Utf8-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client13-year Date-False] >> test.py::test[join-lookupjoin_inner_empty_subq-] [GOOD] >> test.py::test[join-mapjoin_dup_key-off] [SKIPPED] >> test.py::test[join-mapjoin_with_anonymous-off] [SKIPPED] >> test.py::test[join-mergejoin_left_null_column-off] [SKIPPED] >> test.py::test[join-premap_common_cross-off] [SKIPPED] >> test.py::test[join-premap_common_inner_filter-] [SKIPPED] >> test.py::test[join-premap_common_right_tablecontent-off] [SKIPPED] >> test.py::test[join-premap_context_dep-off] [SKIPPED] >> test.py::test[join-premap_map_semi-off] [SKIPPED] >> test.py::test[join-pullup_cross-] >> test.py::test[window-win_func_on_cloned_source-default.txt] [GOOD] >> test.py::test[window-win_func_rank_by_all-] >> test.py::test[blocks-filter_by_column_with_drop-] [GOOD] >> test_http_api.py::TestHttpApi::test_complex_results [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit >> test.py::test[blocks-interval_div-] [GOOD] >> test.py::test[blocks-top_sort_two_desc-] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] >> test.py::test[blocks-struct_type-] >> test.py::test[distinct-distinct_star1-] [GOOD] >> test.py::test[dq-read_cost_native-default.txt] [SKIPPED] >> test.py::test[epochs-use_and_drop_anonymous-] [SKIPPED] >> test.py::test[expr-double_join_with_list_from_range-] [SKIPPED] >> test.py::test[file-file_constness-] [SKIPPED] >> test.py::test[file-parse_file_in_select_as_str-] [SKIPPED] >> test.py::test[hor_join-out_mem_limit-default.txt] >> test.py::test[blocks-combine_all_min_filter-] [GOOD] >> test.py::test[blocks-combine_all_pg_filter-] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] >> 
test_ydb_backup.py::TestRestoreACLOption::test_restore_acl_option >> test.py::test[blocks-div_uint64-] [GOOD] >> test.py::test[blocks-interval_div_scalar-] >> test_http_api.py::TestHttpApi::test_result_offset_limit [GOOD] >> test_http_api.py::TestHttpApi::test_openapi_spec >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_posix/big.csv-csv_with_names-POSIX] [GOOD] >> replay::import_test [GOOD] >> test.py::test[simple_columns-simple_columns_join_all-default.txt] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] >> ydb_configure::import_test [GOOD] |98.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/replay/import_test >> replay::import_test [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] |98.8%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/replay/import_test >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt] [GOOD] >> test.py::test[table_range-merge_non_strict-] [SKIPPED] >> test.py::test[table_range-range_over_desc-] [SKIPPED] >> test.py::test[type_v3-append_diff_flags-] >> test.py::test[weak_field-weak_field_join_where-] [GOOD] >> test.py::test[weak_field-weak_field_type-default.txt] |98.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/cfg/bin/import_test >> ydb_configure::import_test [GOOD] >> test.py::test[type_v3-append_diff_flags-] [SKIPPED] |98.8%| [TS] {RESULT} ydb/tools/cfg/bin/import_test >> test.py::test[type_v3-decimal_yt-] [SKIPPED] >> test.py::test[type_v3-ignore_v3_hint-tag_opt] >> test.py::test[join-pullup_cross-] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename-] >> test.py::test[select-uncorrelated_subqueries-] [GOOD] >> local_ydb::import_test [GOOD] >> test.py::test[tpch-q16-default.txt] >> test.py::test[sampling-bind_expr_subquery-default.txt] [GOOD] >> test.py::test[sampling-subquery_limit-default.txt] [SKIPPED] >> test.py::test[sampling-zero_percentage-] [SKIPPED] >> test.py::test[schema-user_schema_mix3-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client13-year Date-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] Test command err: 2 2 |98.9%| [TM] {RESULT} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/row_dispatcher/ut/unittest >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] Test command err: 2025-05-05T03:24:43.991967Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [1:30:2057] 2025-05-05T03:24:43.992113Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, 
[1:25:2054] 2025-05-05T03:24:43.992120Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [1:25:2054] 2025-05-05T03:24:43.992126Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:26:2054] 2025-05-05T03:24:43.992129Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:26:2054] 2025-05-05T03:24:43.992134Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [3:27:2054] 2025-05-05T03:24:43.992137Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [3:27:2054] 2025-05-05T03:24:43.992153Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-05-05T03:24:43.992173Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2025-05-05T03:24:43.992188Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 0 2025-05-05T03:24:43.992194Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-05-05T03:24:43.992733Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-05-05T03:24:43.992754Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-05-05T03:24:43.992773Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:31:2055] 2025-05-05T03:24:43.992778Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:26:2054] to new [2:31:2055] 2025-05-05T03:24:43.992785Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:31:2055] 2025-05-05T03:24:43.992795Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:32:2056] 2025-05-05T03:24:43.992798Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:31:2055] to new [2:32:2056] 2025-05-05T03:24:43.992801Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:32:2056] 2025-05-05T03:24:43.992807Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-05-05T03:24:43.992810Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2025-05-05T03:24:43.992817Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-05-05T03:24:43.992821Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-05-05T03:24:44.015136Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [5:30:2057] 2025-05-05T03:24:44.015186Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [5:25:2054] 2025-05-05T03:24:44.015191Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [5:25:2054] 2025-05-05T03:24:44.015196Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [6:26:2054] 2025-05-05T03:24:44.015203Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [6:26:2054] 2025-05-05T03:24:44.015208Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [7:27:2054] 2025-05-05T03:24:44.015211Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [7:27:2054] 2025-05-05T03:24:44.015228Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [5:28:2055], topic1, partIds: 0, 1, 2 2025-05-05T03:24:44.015255Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:28:2055] 2025-05-05T03:24:44.015271Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from 
[5:29:2056], topic1, partIds: 3 2025-05-05T03:24:44.015279Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:29:2056] 2025-05-05T03:24:44.032690Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Successfully bootstrapped, local coordinator id [9:5:2052] 2025-05-05T03:24:44.032724Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Successfully bootstrapped, local coordinator id [9:6:2053] 2025-05-05T03:24:44.032730Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Successfully bootstrapped, local coordinator id [9:7:2054] 2025-05-05T03:24:44.032739Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-05-05T03:24:44.032743Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.032747Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.032790Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-05-05T03:24:44.032793Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.032795Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.032801Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-05-05T03:24:44.032809Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.032812Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.044252Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:339 } 2025-05-05T03:24:44.044313Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.044319Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.044571Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:339 } 2025-05-05T03:24:44.047429Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:339 } 2025-05-05T03:24:44.047487Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.047492Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.052208Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:339 } 2025-05-05T03:24:44.052279Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.052284Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.056193Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:339 } 2025-05-05T03:24:44.056257Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.056262Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.063291Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:339 } 2025-05-05T03:24:44.063370Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.063376Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.065553Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:339 } 2025-05-05T03:24:44.065605Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.065610Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.068004Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.068030Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": 2025-05-05T03:24:44.068065Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.068076Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exist, request accepts it (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:339 } 2025-05-05T03:24:44.068088Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Coordination node successfully created 2025-05-05T03:24:44.068095Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Start session 2025-05-05T03:24:44.068358Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Coordination node successfully created 2025-05-05T03:24:44.068370Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Start session 2025-05-05T03:24:44.068465Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.068472Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.070402Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-05-05T03:24:44.070426Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exist, request accepts it (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard_ ... ation node "YDB_DATABASE/RowDispatcher/Tenant" 2025-05-05T03:25:49.367585Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [38:18:2059] 2025-05-05T03:25:49.367629Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:14:2056], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-05-05T03:25:49.367651Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-05-05T03:25:49.367669Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 1 2025-05-05T03:25:49.367701Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:15:2057], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-05-05T03:25:49.367724Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:22:2063] to [38:14:2056] query id QueryId 2025-05-05T03:25:49.367736Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:14:2056] part id 0 query id QueryId 2025-05-05T03:25:49.367748Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:22:2063] to [38:14:2056] query id QueryId 2025-05-05T03:25:49.367756Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:23:2064] to [38:14:2056] query id QueryId 2025-05-05T03:25:49.367761Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:14:2056] part id 1 query id QueryId 2025-05-05T03:25:49.367766Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:23:2064] to [38:14:2056] query id QueryId 2025-05-05T03:25:49.367771Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:22:2063] to [38:15:2057] query id QueryId 2025-05-05T03:25:49.367775Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:15:2057] part id 0 query id QueryId 2025-05-05T03:25:49.367779Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:22:2063] to [38:15:2057] query id QueryId 2025-05-05T03:25:49.367784Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:23:2064] to [38:15:2057] query id QueryId 2025-05-05T03:25:49.367788Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:15:2057] part id 1 query id QueryId 2025-05-05T03:25:49.367793Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:23:2064] to [38:15:2057] query id QueryId 2025-05-05T03:25:49.367801Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:14:2056], reason ActorUnknown 2025-05-05T03:25:49.367806Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [38:14:2056] query id QueryId 2025-05-05T03:25:49.367819Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:15:2057], reason ActorUnknown 2025-05-05T03:25:49.367822Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [38:15:2057] query id QueryId 2025-05-05T03:25:49.367827Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: 
Session is not used, sent TEvPoisonPill to [38:22:2063] 2025-05-05T03:25:49.367835Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [38:23:2064] 2025-05-05T03:25:49.393796Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [40:17:2058], tenant Tenant 2025-05-05T03:25:49.397377Z node 40 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [40:18:2059] 2025-05-05T03:25:49.397408Z node 40 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [40:19:2060] Successfully bootstrapped, local coordinator id [40:18:2059] 2025-05-05T03:25:49.397421Z node 40 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-05-05T03:25:49.397428Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-05-05T03:25:49.397431Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-05-05T03:25:49.397630Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [40:18:2059] 2025-05-05T03:25:49.397677Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:14:2056], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-05-05T03:25:49.397702Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-05-05T03:25:49.397741Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:15:2057], read group connection_id2, topicPath topic part id 0 query id QueryId cookie 1 2025-05-05T03:25:49.397757Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id2 topic topic part id 0 2025-05-05T03:25:49.397775Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:22:2063] to [40:14:2056] query id QueryId 2025-05-05T03:25:49.397783Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:14:2056] part id 0 query id QueryId 2025-05-05T03:25:49.397791Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:22:2063] to [40:14:2056] query id QueryId 2025-05-05T03:25:49.397796Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:23:2064] to [40:15:2057] query id QueryId 2025-05-05T03:25:49.397799Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:15:2057] part id 0 query id QueryId 2025-05-05T03:25:49.397802Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:23:2064] to [40:15:2057] query id QueryId 2025-05-05T03:25:49.397808Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [40:14:2056] topic topic query id QueryId 2025-05-05T03:25:49.397812Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:14:2056] query id QueryId 2025-05-05T03:25:49.397818Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [40:22:2063] 2025-05-05T03:25:49.397827Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [40:15:2057] topic topic query id QueryId 2025-05-05T03:25:49.397829Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:15:2057] query id QueryId 2025-05-05T03:25:49.397833Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to 
[40:23:2064] 2025-05-05T03:25:49.419417Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [42:17:2058], tenant Tenant 2025-05-05T03:25:49.421247Z node 42 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [42:18:2059] 2025-05-05T03:25:49.421278Z node 42 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [42:19:2060] Successfully bootstrapped, local coordinator id [42:18:2059] 2025-05-05T03:25:49.421293Z node 42 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-05-05T03:25:49.421298Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-05-05T03:25:49.421302Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-05-05T03:25:49.421494Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [42:18:2059] 2025-05-05T03:25:49.421539Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [43:16:2053], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 42 2025-05-05T03:25:49.421565Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-05-05T03:25:49.431828Z node 42 :FQ_ROW_DISPATCHER ERROR: Create coordination node "YDB_DATABASE/RowDispatcher/Tenant" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): DNS resolution failed for YDB_ENDPOINT: UNKNOWN: Temporary failure in name resolution } {
: Error: Grpc error response on endpoint YDB_ENDPOINT } ] 2025-05-05T03:25:49.431901Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvTryConnect to node id 43 2025-05-05T03:25:49.431941Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: EvNodeConnected, node id 43 2025-05-05T03:25:49.432045Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:22:2063] to [43:16:2053] query id QueryId 2025-05-05T03:25:49.432080Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:16:2053] part id 0 query id QueryId 2025-05-05T03:25:49.432092Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:22:2063] to [43:16:2053] query id QueryId 2025-05-05T03:25:49.432114Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:16:2053], generation 41 2025-05-05T03:25:49.432119Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:22:2063] to [43:16:2053] query id QueryId 2025-05-05T03:25:49.432135Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:16:2053] part id 0 query id QueryId 2025-05-05T03:25:49.432140Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:22:2063] to [43:16:2053] query id QueryId 2025-05-05T03:25:49.432153Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:16:2053], generation 42 2025-05-05T03:25:49.432157Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [43:16:2053] query id QueryId 2025-05-05T03:25:49.432168Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [42:22:2063] 2025-05-05T03:25:49.458681Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [44:17:2058], tenant Tenant 2025-05-05T03:25:49.460454Z node 44 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [44:18:2059] 2025-05-05T03:25:49.460475Z node 44 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [44:19:2060] Successfully bootstrapped, local coordinator id [44:18:2059] 2025-05-05T03:25:49.460489Z node 44 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-05-05T03:25:49.460493Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-05-05T03:25:49.460497Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-05-05T03:25:49.460523Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [44:18:2059] 2025-05-05T03:25:49.460560Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [44:14:2056], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-05-05T03:25:49.460579Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-05-05T03:25:49.460612Z node 44 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [44:22:2063] to [44:14:2056] query id QueryId 2025-05-05T03:25:49.460621Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [44:14:2056] topic topic query id QueryId 2025-05-05T03:25:49.460625Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [44:14:2056] query id QueryId 2025-05-05T03:25:49.460632Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to 
[44:22:2063] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client0-year Int32 NOT NULL-False] |98.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/local_ydb/import_test >> local_ydb::import_test [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] >> test.py::test[window-win_func_rank_by_all-] [GOOD] >> test.py::test[window-win_fuse_window-default.txt] [SKIPPED] >> test.py::test[ypath-empty_range-dynamic] [SKIPPED] >> ydb-tests-functional-api::import_test [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] |98.9%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/ut/unittest |98.9%| [TS] {RESULT} ydb/public/tools/local_ydb/import_test >> test_break.py::test_create_minidump >> test.py::test[window-full/aggregations_leadlag-] [GOOD] >> test.py::test[blocks-struct_type-] [GOOD] >> test.py::test[window-full/leadlag_compact-] >> test.py::test[blocks-combine_all_pg_filter-] [GOOD] >> test.py::test[blocks-combine_hashed_some-] >> test.py::test[column_group-groups-lookup] |98.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/import_test >> ydb-tests-functional-api::import_test [GOOD] |98.9%| [TS] {RESULT} ydb/tests/functional/api/import_test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] >> test.py::test[ParseFromYdb] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-date_time/simple_iso/big.csv-csv_with_names-ISO] >> test.py::test[blocks-interval_div_scalar-] [GOOD] >> test.py::test[blocks-mul_uint64_opt2-] >> ydb-tests-stress-mixedpy::import_test [GOOD] |98.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part9/py3test >> test.py::test[ypath-empty_range-dynamic] [SKIPPED] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] >> test.py::test[join-selfjoin_on_sorted_with_rename-] [GOOD] >> test.py::test[weak_field-weak_field_type-default.txt] [GOOD] >> test.py::test[window-rank/opt-] >> test.py::test[join-star_join_inners_premap-] [SKIPPED] >> test.py::test[join-trivial_view-] >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [GOOD] >> test.py::test[schema-user_schema_mix3-] [GOOD] >> test.py::test[select-append_to_value-] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] |98.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/mixedpy/import_test >> ydb-tests-stress-mixedpy::import_test [GOOD] |98.9%| [TS] {RESULT} ydb/tests/stress/mixedpy/import_test >> test.py::test[blocks-top_sort_two_desc-] [GOOD] >> 
test.py::test[coalesce-coalesce_few_real-default.txt] >> test.py::test[BitSerialization] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] |98.9%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] >> test.py::test[hor_join-out_mem_limit-default.txt] [GOOD] >> test.py::test[hor_join-out_table_record-default.txt] >> test.py::test[type_v3-ignore_v3_hint-tag_opt] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs-] [SKIPPED] >> test.py::test[union-union_multiin-] >> test.py::test[tpch-q16-default.txt] [GOOD] >> test.py::test[tpch-q2-default.txt] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt] [GOOD] >> TDqSolomonWriteActorTest::TestWriteFormat [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring >> test.py::test[column_group-groups-lookup] [GOOD] >> test.py::test[column_group-groups-max] >> test.py::test[window-full/leadlag_compact-] [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt] >> test.py::test[solomon-BadDownsamplingAggregation-] >> test.py::test[tpch-q12-default.txt] [SKIPPED] >> test.py::test[tpch-q6-default.txt] >> test.py::test[ParseFromYdb] [GOOD] >> test.py::test[SerializeCSVWithNames] >> TDqPqReadActorTest::TestSaveLoadPqRead [GOOD] >> TDqPqReadActorTest::LoadCorruptedState >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] >> ydb-tests-stability-ydb::import_test [GOOD] >> ydb-tests-postgres_integrations-go-libpq::import_test [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client0-year Int32 NOT NULL-False] [GOOD] >> test.py::test[blocks-mul_uint64_opt2-] [GOOD] >> test.py::test[column_group-groups-perusage] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client1-year Uint32 NOT NULL-False] |98.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/ydb/import_test >> ydb-tests-stability-ydb::import_test [GOOD] |99.0%| [TS] {RESULT} ydb/tests/stability/ydb/import_test |99.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/import_test >> ydb-tests-postgres_integrations-go-libpq::import_test [GOOD] |99.0%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/import_test >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-date_time/simple_iso/big.csv-csv_with_names-ISO] [GOOD] >> TDqPqReadActorTest::LoadCorruptedState [GOOD] >> TDqPqReadActorTest::TestLoadFromSeveralStates >> test.py::test[select-append_to_value-] [GOOD] >> test_ydb_backup.py::TestRestoreACLOption::test_restore_acl_option [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-yql_syntax-client0] >> 
test.py::test[select-const_subrequest_and_select_by_all-default.txt] >> test.py::test[window-rank/opt-] [GOOD] >> test.py::test[window-rank/unordered-] >> test.py::test[BitSerialization] [GOOD] >> test.py::test[CosineDistance] >> test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] >> test.py::test[coalesce-coalesce_few_real-default.txt] [GOOD] >> test.py::test[column_group-hint_anon-disable] [SKIPPED] >> test.py::test[column_group-hint_anon-single] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail4-] [SKIPPED] >> test.py::test[column_group-insert_diff_groups3_fail-] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] [GOOD] >> test.py::test[column_group-insert_diff_groups3_fail-] [SKIPPED] >> test.py::test[column_group-publish-single] [SKIPPED] >> test.py::test[column_order-select_orderby-default.txt] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] >> test.py::test[blocks-combine_hashed_some-] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] >> test.py::test[join-trivial_view-] [GOOD] >> test.py::test[join-yql-8131-] >> test.py::test[blocks-combine_hashed_sum-] |99.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/py3test >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] |99.0%| [TM] {RESULT} ydb/tests/fq/http_api/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] >> test_break.py::test_create_minidump [GOOD] >> test_break.py::test_minidump_script >> test.py::test[window-row_number_no_part_from_subq-default.txt] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part_other-] >> test.py::test[column_group-groups-max] [GOOD] >> test.py::test[column_group-hint-disable] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail-] [SKIPPED] >> test.py::test[column_group-insert_diff_groups1_fail-] [SKIPPED] >> test.py::test[column_order-align_publish_native-] >> test.py::test[column_order-align_publish_native-] [SKIPPED] >> test.py::test[count-count_all_grouped-empty] >> test.py::test[hor_join-out_table_record-default.txt] [GOOD] >> test.py::test[hor_join-sorted_out_mix-] [SKIPPED] >> test.py::test[insert-append_missing_null-default.txt] >> test.py::test[column_group-groups-perusage] [GOOD] >> test.py::test[column_group-insert_diff_groups2_fail-] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] >> test.py::test[insert-append_missing_null-default.txt] [SKIPPED] >> test.py::test[insert-keepmeta-with_view] [SKIPPED] >> test.py::test[insert_monotonic-several2-default.txt] [SKIPPED] >> test.py::test[join-bush_in-off] [SKIPPED] >> test.py::test[join-force_merge_join-default.txt] >> test.py::test[column_group-insert_diff_groups2_fail-] [SKIPPED] >> test.py::test[column_order-join-] >> 
test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber >> test.py::test[union-union_multiin-] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] >> library-yql-udfs-common-clickhouse-client-test::import_test [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-yql_syntax-client0] [GOOD] |99.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/udfs/common/clickhouse/client/test/import_test >> library-yql-udfs-common-clickhouse-client-test::import_test [GOOD] |99.0%| [TS] {RESULT} ydb/library/yql/udfs/common/clickhouse/client/test/import_test >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-pg_syntax-client0] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-pg_syntax-client0] [SKIPPED] >> test.py::test[tpch-q2-default.txt] [GOOD] >> test.py::test[tpch-q5-default.txt] >> recipe::import_test [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-yql_syntax-client0] >> test.py::test[column_order-select_orderby-default.txt] [GOOD] >> test.py::test[column_order-select_plain-default.txt] >> test.py::test[tpch-q6-default.txt] [GOOD] >> test.py::test[union_all-mix_map_and_read-default.txt] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt] [GOOD] >> test.py::test[select-create_tuples-default.txt] >> test.py::test[SerializeCSVWithNames] [GOOD] |99.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/token_accessor_mock/import_test >> recipe::import_test [GOOD] |99.0%| [TS] {RESULT} ydb/tests/tools/token_accessor_mock/import_test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client1-year Uint32 NOT NULL-False] [GOOD] >> test.py::test[blocks-combine_hashed_sum-] [GOOD] >> test.py::test[blocks-interval_add_date-] >> test.py::test[SerializeJSONEachRow] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] >> test.py::test[join-yql-8131-] [GOOD] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt] >> test.py::test[CosineDistance] [GOOD] >> test.py::test[CosineSimilarity] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client2-year Uint64 NOT NULL-False] >> test.py::test[count-count_all_grouped-empty] [GOOD] >> test.py::test[dq-dq_replicate_ok-default.txt] >> test.py::test[window-win_func_lead_lag_worm_with_part_other-] [GOOD] >> test.py::test[window-win_func_part_by_expr-] >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> test.py::test[dq-dq_replicate_ok-default.txt] [SKIPPED] >> test.py::test[hor_join-empty_out_hor_join-default.txt] >> test.py::test[union_all-union_all_fields-default.txt] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt] >> test.py::test[window-rank/unordered-] [GOOD] >> test.py::test[window-win_expr_bounds-] >> test.py::test[window-win_expr_bounds-] [SKIPPED] >> test.py::test[window-win_func_over_group_by-] >> 
test.py::test[join-force_merge_join-default.txt] [GOOD] >> test.py::test[join-left_cast_to_string-off] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_nested_right-] >> ydb-library-yql-udfs-common-knn-test::import_test [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-yql_syntax-client0] [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-pg_syntax-client0] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt] [GOOD] >> test.py::test[limit-limit_skip_take-default.txt] >> test.py::test[tpch-q5-default.txt] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-protofield] >> ydb-tests-fq-yds::import_test [GOOD] >> test.py::test[column_order-join-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] >> test.py::test[column_order-select_distinct_star-default.txt] |99.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/udfs/common/knn/test/import_test >> ydb-library-yql-udfs-common-knn-test::import_test [GOOD] |99.0%| [TS] {RESULT} ydb/library/yql/udfs/common/knn/test/import_test |99.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/import_test >> ydb-tests-fq-yds::import_test [GOOD] |99.0%| [TS] {RESULT} ydb/tests/fq/yds/import_test >> test.py::test[blocks-interval_add_date-] [GOOD] >> test.py::test[blocks-interval_add_interval-] >> test.py::test[select-create_tuples-default.txt] [GOOD] >> test.py::test[select-dot_in_alias-default.txt] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] >> test_commit.py::TestCommit::test_commit >> test.py::test[union_all-union_all_with_parenthesis-default.txt] [GOOD] >> test.py::test[weak_field-weak_field_infer_scheme-] >> test.py::test[column_order-select_plain-default.txt] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client2-year Uint64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client3-year Date NOT NULL-False] >> test.py::test[hor_join-empty_out_hor_join-default.txt] [GOOD] >> test.py::test[hor_join-fuse_multi_outs2-outlimit] [SKIPPED] >> test.py::test[insert-select_after_insert_relabeled-default.txt] [SKIPPED] >> test.py::test[insert-yql-13083-existig] [SKIPPED] >> test.py::test[join-anyjoin_common_dup-off] [SKIPPED] >> test.py::test[join-anyjoin_merge_nodup-off] >> TDqPqReadActorTest::TestLoadFromSeveralStates [GOOD] >> test.py::test[join-anyjoin_merge_nodup-off] [SKIPPED] >> test.py::test[join-full_equal_not_null-off] [SKIPPED] >> test.py::test[join-grace_join1-off] [SKIPPED] >> test.py::test[join-join_without_correlation_names-off] [SKIPPED] >> test.py::test[join-late_mergejoin_on_empty-] >> test_break.py::test_minidump_script [GOOD] >> test_break.py::test_minidump_script_args >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark >> test_tpch_import.py::TestS3TpchImport::test_import_and_export >> test.py::test[union_all-mix_map_and_read-default.txt] [GOOD] >> test.py::test[union_all-union_all_trivial-default.txt] >> test_commit.py::TestCommit::test_commit [GOOD] >> test_timeout.py::TestTimeout::test_timeout >> test_tpch.py::TestTpchS1::test_tpch[1] >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions >> 
test.py::test[join-left_join_right_pushdown_nested_right-] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst-] [SKIPPED] >> test.py::test[join-lookupjoin_bug8533-] >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> test.py::test[type_v3-ignore_v3_hint-protofield] [GOOD] >> test.py::test[union_all-infer_3-default.txt] >> test.py::test[SerializeJSONEachRow] [GOOD] >> test.py::test[SerializeParquet] >> test.py::test[CosineSimilarity] [GOOD] >> test.py::test[ErrorDistanceInvalidFormat] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-pg_syntax-client0] [GOOD] |99.1%| [TA] $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-yql_syntax-client0] >> test_stream_query.py::TestStreamQuery::test_sql_suite[plan-window.test] [GOOD] >> test.py::test[select-dot_in_alias-default.txt] [GOOD] >> test.py::test[window-win_func_part_by_expr-] [GOOD] >> test.py::test[limit-limit_skip_take-default.txt] [GOOD] >> test.py::test[column_order-select_distinct_star-default.txt] [GOOD] >> test.py::test[column_order-select_limit_offset-default.txt] >> test.py::test[ErrorDistanceInvalidFormat] [GOOD] >> test.py::test[ErrorDistanceSameFormat] >> ydb-tests-tools-kqprun-tests::import_test [GOOD] >> test.py::test[join-late_mergejoin_on_empty-] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client3-year Date NOT NULL-False] [GOOD] >> test.py::test[weak_field-weak_field_infer_scheme-] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[0] >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark [GOOD] >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[window-win_func_over_group_by-] [GOOD] >> test.py::test[blocks-interval_add_interval-] [GOOD] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.NO] >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] >> test.py::test[union_all-infer_3-default.txt] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt] [GOOD] >> test.py::test[union_all-union_all_trivial-default.txt] [GOOD] >> KqpFederatedQuery::ExecuteScriptWithLargeFile [GOOD] >> test_timeout.py::TestTimeout::test_timeout [GOOD] >> test_break.py::test_minidump_script_args [GOOD] >> test_crud.py::TestYdbCrudOperations::test_crud_operations >> test.py::test[SerializeParquet] [GOOD] >> test.py::test[join-lookupjoin_bug8533-] [GOOD] >> ydb-public-tools-lib-cmds-ut::import_test [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-yql_syntax-client0] [GOOD] >> test.py::test[window-win_over_few_partitions-] >> test.py::test[join-lookupjoin_not_selected-] >> test.py::test[lineage-error_type-] [SKIPPED] >> test.py::test[column_order-select_limit_offset-default.txt] [GOOD] >> test.py::test[join-left_null_literal-] [SKIPPED] >> test.py::test[join-lookupjoin_semi_subq-] >> test.py::test[window-current/ansi_current-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client4-year Utf8 NOT NULL-False] >> test.py::test[view-secure-] [SKIPPED] >> test_break.py::test_compatibility_info >> test.py::test[blocks-sort_one_asc-] >> test.py::test[view-file_outer-] 
[SKIPPED] >> test.py::test[select-optional_as_warn-default.txt] >> test.py::test[ErrorDistanceSameFormat] [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchSolomon [GOOD] >> TDqSolomonWriteActorTest::TestWriteWithTimeseries >> test.py::test[join-lookupjoin_not_selected-] [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-pg_syntax-client0] >> test.py::test[window-win_over_few_partitions-] [GOOD] >> test.py::test[lineage-with_inline-default.txt] [SKIPPED] >> test.py::test[distinct-distinct_having_no_agg-default.txt] >> test.py::test[join-lookupjoin_semi_subq-] [GOOD] >> test.py::test[join-mapjoin_on_very_complex_type-off] [SKIPPED] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 >> test.py::test[window-current/ansi_current-] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt] >> test.py::test[SerializeParquetPartitioned] >> test.py::test[ypath-multi_key-default.txt] >> test.py::test[flatten_by-flatten_and_where-] >> test_break.py::test_compatibility_info [GOOD] >> test.py::test[blocks-sort_one_asc-] [GOOD] >> test.py::test[weak_field-weak_field_data-] >> KqpFederatedQuery::ExecuteScriptWithThinFile >> test.py::test[select-optional_as_warn-default.txt] [GOOD] >> test.py::test[ErrorDistanceSameSize] >> test.py::test[window-yql-14738-default.txt] >> test.py::test[optimizers-yql-7767_key_filter_with_view-] [SKIPPED] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client4-year Utf8 NOT NULL-False] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt] [GOOD] >> test.py::test[join-mergejoin_any_no_join_reduce-] >> test.py::test[order_by-limit-] >> test.py::test[weak_field-weak_field_data-] [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-pg_syntax-client0] [SKIPPED] >> test.py::test[ErrorDistanceSameSize] [GOOD] >> test.py::test[select-select_all_group_by_column-] >> test.py::test[join-lookupjoin_semi_empty-off] [SKIPPED] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[window-yql-14738-default.txt] [GOOD] >> test.py::test[order_by-limit-] [GOOD] >> test.py::test[join-mergejoin_any_no_join_reduce-] [GOOD] >> test.py::test[window-full/session_compact-] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt] [GOOD] >> test.py::test[dq-precompute_result-default.txt] [SKIPPED] >> test.py::test[flatten_by-flatten_and_where-] [GOOD] >> test.py::test[ypath-multi_key-default.txt] [GOOD] >> test.py::test[blocks-sort_two_asc-] >> test.py::test[expr-non_persistable_insert_into_fail-] >> test.py::test[ErrorDistanceSameTag] >> test.py::test[select-select_all_group_by_column-] [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-yql_syntax-client0] >> test.py::test[order_by-literal_complex-] [SKIPPED] >> test.py::test[order_by-singular-default.txt] [SKIPPED] >> test.py::test[pg-nulls-default.txt] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_nested-off] [SKIPPED] >> test.py::test[pg-pg_types_orderby-] [SKIPPED] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client5-year Int64 NOT NULL-False] >> test.py::test[weak_field-weak_field_long_fields-] >> test.py::test[flatten_by-flatten_one_field-] >> test.py::test[join-mergejoin_with_different_key_names_nested-] >> test.py::test[pg-tpcds-q23-default.txt] [SKIPPED] >> 
test.py::test[pg-tpcds-q24-default.txt] >> test.py::test[expr-non_persistable_insert_into_fail-] [SKIPPED] >> test.py::test[solomon-HistResponse-default.txt] >> test.py::test[select-shift_columns-default.txt] >> test.py::test[join-lookupjoin_semi_subq-off] [SKIPPED] >> test.py::test[ErrorDistanceSameTag] [GOOD] >> test.py::test[window-full/aggregations_compact-] >> test.py::test[blocks-sort_two_asc-] [GOOD] >> test.py::test[pg-tpcds-q24-default.txt] [SKIPPED] >> test.py::test[select-shift_columns-default.txt] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client5-year Int64 NOT NULL-False] [GOOD] |99.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/import_test >> ydb-public-tools-lib-cmds-ut::import_test [GOOD] >> test.py::test[hor_join-group_sampling-] [SKIPPED] >> test.py::test[weak_field-weak_field_long_fields-] [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-yql_syntax-client0] [GOOD] >> test.py::test[distinct-distinct_count_no_gouping-default.txt] |99.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/import_test >> ydb-tests-tools-kqprun-tests::import_test [GOOD] >> test.py::test[pg-tpcds-q49-default.txt] [SKIPPED] >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test.py::test[pg-tpcds-q51-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q67-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q70-default.txt] >> test.py::test[select-table_funcs_spec-default.txt] >> test.py::test[solomon-InvalidProject-] >> test.py::test[ErrorFloatFromBinaryStringBitVector] >> test.py::test[join-mapjoin_early_rewrite_star-] >> S3PathStyleBackup::DisableVirtualAddressing >> test.py::test[window-full/session_compact-] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nested-] [GOOD] >> test.py::test[SerializeParquetPartitioned] [GOOD] >> test_ydb_backup.py::TestRestoreNoData::test_restore_no_data >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 [GOOD] >> test.py::test[window-full/aggregations_compact-] [GOOD] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD] >> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD] >> test.py::test[flatten_by-flatten_one_field-] [GOOD] >> ydb-tests-functional-postgresql::import_test [GOOD] >> test.py::test[window-current/aggregations-] >> test.py::test[pg-tpcds-q70-default.txt] [SKIPPED] >> test.py::test[distinct-distinct_count_no_gouping-default.txt] [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-pg_syntax-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client6-year Int32-False] >> test.py::test[select-table_funcs_spec-default.txt] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted-off] [SKIPPED] >> test.py::test[join-pullup_inner-] >> test.py::test[SerializeTSVWithNames] >> test_ydb_backup.py::TestRestoreNoData::test_restore_no_data [GOOD] >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer >> test.py::test[window-current/aggregations-] [GOOD] >> test.py::test[hor_join-max_outtables-] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite_star-] [GOOD] >> test.py::test[pg-tpcds-q78-default.txt] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props-] >> test.py::test[distinct-distinct_union_all-default.txt] >> test.py::test[ErrorFloatFromBinaryStringBitVector] [GOOD] >> test.py::test[window-current/session_aliases-] 
>> test.py::test[window-win_func_into_udf-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client6-year Int32-False] [GOOD] >> test.py::test[join-pullup_inner-] [GOOD] >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] >> test.py::test[hor_join-out_sampling-] >> test.py::test[SerializeTSVWithNames] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-] >> test.py::test[window-win_func_with_struct_access-default.txt] >> test.py::test[pg-tpcds-q83-default.txt] [SKIPPED] >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join0.test] [GOOD] |99.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/pq_read/test/py3test >> test_timeout.py::TestTimeout::test_timeout [GOOD] >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer [GOOD] >> test.py::test[join-split_to_list_as_key-] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-pg_syntax-client0] [GOOD] >> test.py::test[pragma-file-default.txt] [SKIPPED] |99.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part5/py3test >> test.py::test[ypath-multi_key-default.txt] [GOOD] >> test.py::test[produce-process_with_assume-] [SKIPPED] |99.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/minidumps/py3test >> test_break.py::test_compatibility_info [GOOD] >> test.py::test[select-trivial_between-default.txt] |99.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_stream_query.py::TestStreamQuery::test_sql_suite[plan-window.test] [GOOD] >> test.py::test[distinct-distinct_union_all-default.txt] [GOOD] >> test.py::test[ErrorFloatFromBinaryStringEmpty] >> test.py::test[window-current/session_aliases-] [GOOD] >> test.py::test[window-win_func_into_udf-] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client7-year Uint32-False] >> test.py::test[hor_join-out_sampling-] [SKIPPED] >> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v1-timestamp/completeness_iso/test.csv-csv_with_names] >> TPqWriterTest::TestWriteToTopic >> test.py::test[produce-process_with_python_stream-empty] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props-] [GOOD] >> test.py::test[ErrorFloatFromBinaryStringEmpty] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt] >> test.py::test[YqlType] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-off] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many-off] [SKIPPED] >> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] >> test.py::test[window-win_func_with_struct_access-default.txt] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join1.test] >> test.py::test[window-yql-14479-default.txt] [SKIPPED] >> test.py::test[window-generic/aggregations_include_current-] >> test.py::test[join-split_to_list_as_key-] [GOOD] >> test.py::test[select-trivial_between-default.txt] [GOOD] >> test.py::test[ypath-limit_with_key-default.txt] [SKIPPED] >> test.py::test[insert-keepmeta_nonstrict_fail-] [SKIPPED] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client7-year Uint32-False] [GOOD] >> test.py::test[join-star_join_inners-off] [SKIPPED] >> 
test.py::test[produce-reduce_multi_in-sorted] [SKIPPED] >> test.py::test[ErrorFloatFromBinaryStringInvalid] >> test.py::test[join-mergejoin_force_no_sorted-off] [SKIPPED] >> test.py::test[join-order_of_qualified-] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt] [GOOD] >> test.py::test[hor_join-yql19332_aux_cols-] [SKIPPED] >> test.py::test[YqlType] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt] >> test.py::test[window-generic/aggregations_include_current-] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling-] [SKIPPED] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client8-year Int64-False] >> test.py::test[join-yql-8125-] [SKIPPED] >> test.py::test[insert-keepmeta_with_read_udf_fail-] [SKIPPED] >> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v1-timestamp/completeness_iso/test.csv-csv_with_names] [GOOD] >> test.py::test[ErrorFloatFromBinaryStringInvalid] [GOOD] >> test.py::test[join-order_of_qualified-] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table-] >> test.py::test[insert-merge_publish-] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt] [GOOD] >> TPqWriterTest::TestWriteToTopic [GOOD] >> test.py::test[ql_filter-integer_escaping-] [SKIPPED] >> test.py::test[key_filter-dict_contains_optional-] >> test.py::test[window-leading/aggregations-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client8-year Int64-False] [GOOD] |99.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part18/py3test >> test.py::test[window-yql-14738-default.txt] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v2-timestamp/completeness_iso/test.csv-csv_with_names] >> test.py::test[insert-replace_ordered_by_key-default.txt] [SKIPPED] >> test.py::test[insert-select_after_replace-default.txt] [SKIPPED] >> test.py::test[schema-append_to_desc_with_remap-] [SKIPPED] >> test.py::test[join-premap_common_inner-off] [SKIPPED] |99.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/import_test >> ydb-tests-functional-postgresql::import_test [GOOD] >> test.py::test[table_range-each_with_non_existing_all_fail-] [SKIPPED] >> test.py::test[table_range-range_over_filter-] [SKIPPED] >> test.py::test[EuclideanDistance] >> test.py::test[select-refselect-1000] [SKIPPED] >> test.py::test[join-premap_common_inner_both_sides-off] [SKIPPED] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client9-year Uint64-False] >> test.py::test[window-leading/aggregations-] [GOOD] |99.1%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/import_test >> test.py::test[tpch-q10-default.txt] >> test.py::test[insert-values_subquery-] [SKIPPED] >> test.py::test[select-result_size_limit-] >> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v2-timestamp/completeness_iso/test.csv-csv_with_names] [GOOD] >> TPqWriterTest::TestWriteToTopicMultiBatch >> test.py::test[join-premap_common_multiparents-off] [SKIPPED] >> test.py::test[join-premap_common_multiparents_no_premap-off] [SKIPPED] |99.1%| [TA] {RESULT} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test.py::test[insert_monotonic-break_sort_fail-] [SKIPPED] >> test.py::test[EuclideanDistance] [GOOD] >> test.py::test[select-result_size_limit-] [GOOD] >> test.py::test[insert_monotonic-keep_unique-] [SKIPPED] >> test_format_setting.py::TestS3::test_date_time_completeness_iso[v1-date_time/completeness_iso/test.csv-csv_with_names] >> test.py::test[join-premap_common_semi-] [SKIPPED] |99.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] >> test.py::test[join-bush_in-] >> test.py::test[InnerProductSimilarity] >> test.py::test[select-result_size_limit_with_fill-] >> test.py::test[join-pullup_renaming-] |99.2%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/import_test >> test.py::test[insert_monotonic-to_empty-] [SKIPPED] >> test.py::test[join-bush_in-] [GOOD] >> test.py::test[join-cbo_7tables_only_common_join-] [SKIPPED] >> test.py::test[select-result_size_limit_with_fill-] [GOOD] >> test_format_setting.py::TestS3::test_date_time_completeness_iso[v1-date_time/completeness_iso/test.csv-csv_with_names] [GOOD] >> test.py::test[InnerProductSimilarity] [GOOD] >> test.py::test[join-equi_join_three_asterisk-off] [SKIPPED] >> test.py::test[select-select_all_ordered-default.txt] >> test_format_setting.py::TestS3::test_date_time_completeness_iso[v2-date_time/completeness_iso/test.csv-csv_with_names] >> test.py::test[Int8Serialization] >> test.py::test[select-select_all_ordered-default.txt] [GOOD] >> test.py::test[join-group_compact_by-] >> test.py::test[join-equi_join_three_asterisk_eval-off] [SKIPPED] >> test_format_setting.py::TestS3::test_date_time_completeness_iso[v2-date_time/completeness_iso/test.csv-csv_with_names] [GOOD] >> test.py::test[Int8Serialization] [GOOD] >> test.py::test[join-flatten_columns2-] >> test.py::test[select-to_dict-default.txt] >> test.py::test[join-group_compact_by-] [GOOD] >> test_format_setting.py::TestS3::test_date_null[v1-date_null/as_default/test.csv] >> test.py::test[LazyListSerialization] >> test.py::test[join-flatten_columns2-] [GOOD] >> test.py::test[join-inner_on_key_only-] >> test.py::test[select-to_dict-default.txt] [GOOD] >> test.py::test[LazyListSerialization] [GOOD] >> test.py::test[join-grace_join2-] [SKIPPED] >> test.py::test[select-two_selects_with_diff_fields-default.txt] >> test.py::test[join-inner_on_key_only-] [GOOD] >> TPqWriterTest::TestWriteToTopicMultiBatch [GOOD] >> test.py::test[ListSerialization] >> test.py::test[join-inner_with_select-] >> test.py::test[join-lookupjoin_inner_1o2o-off] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o-off] [SKIPPED] >> test.py::test[join-lookupjoin_semi-off] [SKIPPED] >> test.py::test[join-lookupjoin_unused_keys-] >> test.py::test[join-lookupjoin_unused_keys-] [GOOD] >> test.py::test[join-mapjoin_early_rewrite-] |99.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part2/py3test >> test.py::test[window-win_func_with_struct_access-default.txt] [GOOD] |99.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/udfs/common/clickhouse/client/test/py3test >> test.py::test[YqlType] [GOOD] |99.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part15/py3test >> test.py::test[ypath-limit_with_key-default.txt] [SKIPPED] |99.2%| [TS] {RESULT} ydb/tests/functional/postgresql/import_test |99.2%| [TS] {RESULT} ydb/tests/tools/pq_read/test/py3test |99.2%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/backup/s3_path_style/unittest >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] >> TPqWriterTest::TestDeferredWriteToTopic >> test.py::test[ListSerialization] [GOOD] >> test.py::test[ManhattanDistance] >> TPqWriterTest::TestDeferredWriteToTopic [GOOD] >> test.py::test[tpch-q10-default.txt] [GOOD] >> test.py::test[udf-python_struct-] [SKIPPED] >> test.py::test[udf-two_regexps-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client9-year Uint64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client10-year String NOT NULL-True] |99.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part12/py3test >> test.py::test[window-leading/aggregations-] [GOOD] |99.2%| [TM] {RESULT} ydb/tests/functional/minidumps/py3test >> test.py::test[select-two_selects_with_diff_fields-default.txt] [GOOD] >> test.py::test[tpch-q21-default.txt] [SKIPPED] >> test.py::test[type_v3-append_diff_layout1-] [SKIPPED] >> test.py::test[type_v3-append_struct-default.txt] [SKIPPED] >> test.py::test[type_v3-replace_diff_layout-] [SKIPPED] >> test.py::test[union_all-union_all_multiple-default.txt] >> TPqWriterTest::WriteNonExistentTopic [GOOD] >> test.py::test[key_filter-dict_contains_optional-] [GOOD] >> test.py::test[key_filter-split_input_with_key_filter1-] [SKIPPED] >> test.py::test[key_filter-yql-14157-] [SKIPPED] >> test.py::test[limit-zero_limit-default.txt] >> TPqWriterTest::TestCheckpoints |99.2%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test |99.2%| [TM] {RESULT} ydb/tests/functional/backup/s3_path_style/unittest >> test.py::test[join-pullup_renaming-] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/py3test >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00076c/ydb/tests/fq/restarts/test-results/py3test/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00076c/ydb/tests/fq/restarts/test-results/py3test/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1523009) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1526018 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client10-year String NOT NULL-True] [GOOD] >> test_format_setting.py::TestS3::test_date_null[v1-date_null/as_default/test.csv] [GOOD] >> test_format_setting.py::TestS3::test_date_null[v1-date_null/parse_error/test.csv] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client11-year String-False] >> test.py::test[join-inner_with_select-] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by-off] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_csee-] [SKIPPED] >> test.py::test[join-lookupjoin_semi_empty-] |99.3%| [TM] {RESULT} ydb/tests/fq/restarts/py3test |99.3%| [TM] {RESULT} ydb/library/yql/udfs/common/clickhouse/client/test/py3test >> TDqSolomonWriteActorTest::TestWriteWithTimeseries [GOOD] >> TDqSolomonWriteActorTest::TestCheckpoints >> test.py::test[flatten_by-flatten_by_typed_table-] [GOOD] >> test.py::test[flatten_by-flatten_list_on_flatten_by-] >> TPqWriterTest::TestCheckpoints [GOOD] >> TPqWriterTest::TestCheckpointWithEmptyBatch >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] >> TDqSolomonWriteActorTest::TestCheckpoints [GOOD] >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-] [GOOD] >> test.py::test[join-right_trivial-off] [SKIPPED] >> test.py::test[join-star_join-off] [SKIPPED] >> test.py::test[join-star_join_multi-] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join1.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join2.test] >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] >> test.py::test[ManhattanDistance] [GOOD] >> test.py::test[NullForwarding] >> test.py::test[join-lookupjoin_semi_empty-] [GOOD] >> test.py::test[join-lookupjoin_take_skip-] [SKIPPED] >> test.py::test[join-mapjoin_opt_vs_2xopt-off] [SKIPPED] >> test.py::test[join-mergejoin_choose_primary-] >> test.py::test[union_all-union_all_multiple-default.txt] [GOOD] >> test.py::test[view-file_inner_udf-] [SKIPPED] >> test.py::test[weak_field-few_source_different_columns-] [SKIPPED] >> test.py::test[weak_field-weak_field-] >> test.py::test[join-mapjoin_early_rewrite-] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star-off] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client11-year String-False] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star-off] [SKIPPED] >> test.py::test[join-mergejoin_big_primary-off] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort-off] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_nomatch-] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client12-year Utf8-False] >> test_format_setting.py::TestS3::test_date_null[v1-date_null/parse_error/test.csv] [GOOD] >> test.py::test[limit-zero_limit-default.txt] [GOOD] >> 
test.py::test[lineage-isolated-default.txt] [SKIPPED] >> test.py::test[lineage-reduce_all-default.txt] [SKIPPED] >> test.py::test[lineage-select_all_filter-default.txt] [SKIPPED] >> test.py::test[lineage-union_all_tablerow-default.txt] [SKIPPED] >> test.py::test[multicluster-basic-default.txt] [SKIPPED] >> test.py::test[optimizers-simplified_path_constraint-] [SKIPPED] >> test.py::test[optimizers-unused_columns_window-] >> test.py::test[udf-two_regexps-] [GOOD] >> test.py::test[udf-udf_call_with_group_and_limit-] >> test_format_setting.py::TestS3::test_date_null[v2-date_null/as_default/test.csv] >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[NullForwarding] [GOOD] >> test.py::test[OptionalAutoUnpacking] >> test.py::test[solomon-Subquery-default.txt] >> test_workload.py::TestYdbLogWorkload::test[row] >> test.py::test[flatten_by-flatten_list_on_flatten_by-] [GOOD] >> test.py::test[hor_join-group_yamr-] [SKIPPED] >> test.py::test[hor_join-skip_yamr-] [SKIPPED] >> test.py::test[insert-select_operate_with_columns-] [SKIPPED] >> test.py::test[insert-trivial_literals-default.txt] [SKIPPED] >> test.py::test[insert_monotonic-break_unique_fail-] [SKIPPED] >> test.py::test[insert_monotonic-several1-default.txt] [SKIPPED] >> test.py::test[join-bush_dis_in_in-off] [SKIPPED] >> test.py::test[join-equi_join_three_simple-off] [SKIPPED] >> test.py::test[join-flatten_columns2-off] [SKIPPED] >> test.py::test[join-inner_all-off] [SKIPPED] >> test.py::test[join-join_comp_common_table-off] [SKIPPED] >> test.py::test[join-join_left_cbo-] [SKIPPED] >> test.py::test[join-lookupjoin_not_selected-off] [SKIPPED] >> test.py::test[join-mergejoin_unused_keys-] >> test_workload.py::TestYdbWorkload::test >> test.py::test[OptionalAutoUnpacking] [GOOD] >> test.py::test[Uint8Serialization] >> test.py::test[udf-udf_call_with_group_and_limit-] [GOOD] >> test.py::test[view-standalone_view_lambda-] [SKIPPED] >> test.py::test[weak_field-optimize_weak_fields_combine-] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/pq_async_io/ut/unittest >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] Test command err: 2025-05-05T03:24:16.022132Z node 1 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [1:7500797265334188961:2053], metadatafields: , partitions: 666 2025-05-05T03:24:16.229723Z node 1 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-05-05T03:24:16.229741Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2025-05-05T03:24:16.229743Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([1:7500797265334188961:2053]) 2025-05-05T03:24:16.229755Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [1:7500797269629156263:2048] 2025-05-05T03:24:16.229869Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [1:7500797265334188962:2054] 2025-05-05T03:24:16.229881Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
Send TEvCoordinatorRequest to coordinator [1:7500797265334188962:2054], partIds: 666 cookie 1 2025-05-05T03:24:16.229934Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvCoordinatorResult from [1:7500797265334188962:2054], cookie 1 2025-05-05T03:24:16.229937Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. UpdateSessions, Sessions size 0 2025-05-05T03:24:16.229938Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2025-05-05T03:24:16.229942Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [1:7500797265334188964:2056], generation 1 2025-05-05T03:24:16.229946Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvStartSession to [1:7500797265334188964:2056], connection id 1 partitions offsets (666 / ), 2025-05-05T03:24:16.230024Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvStartSessionAck from [1:7500797265334188964:2056], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-05-05T03:24:16.230068Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [1:7500797265334188964:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-05-05T03:24:16.230186Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [1:7500797265334188964:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-05-05T03:24:16.230209Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 1 2025-05-05T03:24:16.230211Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 2 2025-05-05T03:24:16.230259Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2025-05-05T03:24:16.230294Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2025-05-05T03:24:16.230296Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-05-05T03:24:16.230458Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. PassAway 2025-05-05T03:24:16.230468Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
State: used buffer size 0 ready buffer event size 0 state 5 InFlyAsyncInputData 0 Counters: CoordinatorChanged 1 CoordinatorResult 1 MessageBatch 1 StartSessionAck 1 NewDataArrived 1 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 0 Retry 0 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 GetAsyncInputData 2 NotifyCA 1 [1:7500797265334188964:2056] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partitions 666 offsets 666=2 has pending data 2025-05-05T03:24:16.230469Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500797269629156263:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send StopSession to [1:7500797265334188964:2056] generation 1 2025-05-05T03:24:16.422803Z node 3 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [3:7500797270487528582:2053], metadatafields: , partitions: 666 2025-05-05T03:24:16.621919Z node 3 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-05-05T03:24:16.621941Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2025-05-05T03:24:16.621944Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([3:7500797270487528582:2053]) 2025-05-05T03:24:16.621957Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [3:7500797270487528588:2048] 2025-05-05T03:24:16.621997Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [3:7500797270487528583:2054] 2025-05-05T03:24:16.622030Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorRequest to coordinator [3:7500797270487528583:2054], partIds: 666 cookie 1 2025-05-05T03:24:16.622165Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvCoordinatorResult from [3:7500797270487528583:2054], cookie 1 2025-05-05T03:24:16.622173Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. UpdateSessions, Sessions size 0 2025-05-05T03:24:16.622175Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2025-05-05T03:24:16.622180Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [3:7500797270487528585:2056], generation 1 2025-05-05T03:24:16.622186Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvStartSession to [3:7500797270487528585:2056], connection id 1 partitions offsets (666 / ), 2025-05-05T03:24:16.622246Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvStartSessionAck from [3:7500797270487528585:2056], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-05-05T03:24:16.622478Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
Received TEvNewDataArrived from [3:7500797270487528585:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-05-05T03:24:16.622752Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [3:7500797270487528585:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-05-05T03:24:16.622763Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 1 2025-05-05T03:24:16.622765Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 2 2025-05-05T03:24:16.622771Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2025-05-05T03:24:16.622828Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2025-05-05T03:24:16.622833Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-05-05T03:24:16.622883Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvUndelivered, TSystem::Undelivered from [3:7500797270487528585:2056], reason Disconnected, cookie 999 2025-05-05T03:24:16.622898Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [3:7500797270487528585:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-05-05T03:24:16.623047Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvRetry, EventQueueId 1 2025-05-05T03:24:16.623094Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [3:7500797270487528585:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-05-05T03:24:16.623101Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 3 2025-05-05T03:24:16.623104Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2025-05-05T03:24:16.623114Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 3 2025-05-05T03:24:16.623121Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 1 rows, buffer size 0, free space 974, result size 26 2025-05-05T03:24:16.623380Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. PassAway 2025-05-05T03:24:16.623411Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
State: used buffer size 0 ready buffer event size 0 state 5 InFlyAsyncInputData 0 Counters: CoordinatorChanged 1 CoordinatorResult 1 MessageBatch 2 StartSessionAck 1 NewDataArrived 2 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 1 Retry 1 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 GetAsyncInputData 3 NotifyCA 2 [3:7500797270487528585:2056] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partitions 666 offsets 666=3 has pending data 2025-05-05T03:24:16.623412Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500797270487528588:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send StopSession to [3:7500797270487528585:2056] generation 1 2025-05-05T03:24:16.842795Z node 5 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [5:7500797269082344131:2053], metadat ... xId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [bda6d987-789a85bd-92c90d6b-b79cf0cd] Write session: send init request: init_request { path: "Checkpoints" producer_id: "bda6d987-789a85bd-92c90d6b-b79cf0cd" message_group_id: "bda6d987-789a85bd-92c90d6b-b79cf0cd" } 2025-05-05T03:26:15.567805Z :TRACE: [local] TRACE_EVENT InitRequest 2025-05-05T03:26:15.567905Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [bda6d987-789a85bd-92c90d6b-b79cf0cd] Write session: OnWriteDone gRpcStatusCode: 0 2025-05-05T03:26:15.567971Z :DEBUG: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] [] Successfully connected. Initializing session 2025-05-05T03:26:15.572092Z :INFO: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] [] Server session id: test_client_1_22_1298341424453959041_v1 2025-05-05T03:26:15.572106Z :DEBUG: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-05T03:26:15.572196Z :DEBUG: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-05-05T03:26:15.574627Z :INFO: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "Checkpoints". Partition: 0. Read offset: (NULL) 2025-05-05T03:26:15.578114Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [bda6d987-789a85bd-92c90d6b-b79cf0cd] Write session: OnReadDone gRpcStatusCode: 0 2025-05-05T03:26:15.578142Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [bda6d987-789a85bd-92c90d6b-b79cf0cd] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1746415575578 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-05-05T03:26:15.578154Z :DEBUG: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] [] Got ReadResponse, serverBytesSize = 935, now ReadSizeBudget = 0, ReadSizeServerDelta = 52427865 2025-05-05T03:26:15.578188Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [bda6d987-789a85bd-92c90d6b-b79cf0cd] Write session established. 
Init response: last_seq_no: 5 session_id: "bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0" 2025-05-05T03:26:15.578210Z :TRACE: [local] TRACE_EVENT InitResponse partition_id=0 session_id=bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0 2025-05-05T03:26:15.578216Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] MessageGroupId [bda6d987-789a85bd-92c90d6b-b79cf0cd] Write session: set DirectWriteToPartitionId 0 2025-05-05T03:26:15.578242Z :DEBUG: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52427865 2025-05-05T03:26:15.578297Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [0] Get partition location async, partition 0, delay 0.000000s 2025-05-05T03:26:15.578310Z :TRACE: [local] TRACE_EVENT DescribePartitionRequest path=local/Checkpoints partition_id=0 2025-05-05T03:26:15.578332Z :DEBUG: [local] Decompression task done. Partition/PartitionSessionId: 1 (0-4) 2025-05-05T03:26:15.578352Z :DEBUG: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] [] Returning serverBytesSize = 935 to budget 2025-05-05T03:26:15.578358Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [0] Getting partition location, partition 0 2025-05-05T03:26:15.578359Z :DEBUG: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] [] In ContinueReadingDataImpl, ReadSizeBudget = 935, ReadSizeServerDelta = 52427865 2025-05-05T03:26:15.578524Z :DEBUG: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-05-05T03:26:15.578546Z :DEBUG: [local] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-05-05T03:26:15.578554Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-05-05T03:26:15.578561Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 1} (2-2) 2025-05-05T03:26:15.578565Z :DEBUG: [local] Take Data. Partition 0. Read: {2, 0} (3-3) 2025-05-05T03:26:15.578573Z :DEBUG: [local] Take Data. Partition 0. Read: {3, 0} (4-4) 2025-05-05T03:26:15.578628Z :DEBUG: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] [] The application data is transferred to the client. Number of messages 5, size 5 bytes 2025-05-05T03:26:15.578643Z :DEBUG: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] [] Returning serverBytesSize = 0 to budget 2025-05-05T03:26:15.578726Z :INFO: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] Closing read session. Close timeout: 0.000000s 2025-05-05T03:26:15.578739Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-05-05T03:26:15.578750Z :INFO: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] Counters: { Errors: 0 CurrentSessionLifetimeMs: 13 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-05-05T03:26:15.578770Z :NOTICE: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-05-05T03:26:15.578776Z :DEBUG: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] [] Abort session to cluster 2025-05-05T03:26:15.578907Z :INFO: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] Closing read session. Close timeout: 0.000000s 2025-05-05T03:26:15.578914Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-05-05T03:26:15.578933Z :INFO: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] Counters: { Errors: 0 CurrentSessionLifetimeMs: 13 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-05-05T03:26:15.578941Z :NOTICE: [local] [local] [b67b4b3b-a1374139-77b4f9cf-822cb133] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-05-05T03:26:15.580140Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [0] Got PartitionLocation response. Status SUCCESS, proto: partition { active: true partition_location { node_id: 1 generation: 1 } } 2025-05-05T03:26:15.580152Z :TRACE: [local] TRACE_EVENT DescribePartitionResponse partition_id=0 active=1 pl_node_id=1 pl_generation=1 2025-05-05T03:26:15.580159Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [0] GetPreferredEndpoint: partitionId 0, partitionNodeId 1 exists in the endpoint pool. 2025-05-05T03:26:15.580165Z :TRACE: [local] TRACE_EVENT PreferredPartitionLocation Endpoint= NodeId=1 Generation=1 2025-05-05T03:26:15.580169Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [1] Start write session. Will connect to nodeId: 1 2025-05-05T03:26:15.580660Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [1] Write session: direct write to partition: 0, generation 1 2025-05-05T03:26:15.580688Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [1] Write session: send init request: init_request { path: "Checkpoints" producer_id: "bda6d987-789a85bd-92c90d6b-b79cf0cd" partition_with_generation { generation: 1 } } 2025-05-05T03:26:15.580693Z :TRACE: [local] TRACE_EVENT InitRequest pwg_partition_id=0 pwg_generation=1 2025-05-05T03:26:15.580781Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [1] Write session: OnWriteDone gRpcStatusCode: 0 2025-05-05T03:26:15.580994Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-05-05T03:26:15.581003Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [1] Write session will now close 2025-05-05T03:26:15.581024Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [1] Write session: aborting 2025-05-05T03:26:15.581133Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-05-05T03:26:15.581141Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-05-05T03:26:15.581153Z :TRACE: [local] TRACE_EVENT Error status=CLIENT_CANCELLED 2025-05-05T03:26:15.581156Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2025-05-05T03:26:15.581341Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-05-05T03:26:15.581541Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [bda6d987-789a85bd-92c90d6b-b79cf0cd|298a0815-e7a4c827-447a2fc6-26381204_0] PartitionId [0] Generation [1] Write session: destroy 2025-05-05T03:26:15.919334Z node 54 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 0. Checkpoint: 1. Finished: 0 2025-05-05T03:26:15.924254Z node 54 :KQP_COMPUTE DEBUG: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. [Checkpoint 0.0] Send checkpoint state immediately 2025-05-05T03:26:15.924305Z node 54 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Save checkpoint { Id: 0 Generation: 0 } state: { SourceId: "ca8bb885-81cd3cd9-a0d880bf-69a83e9e" } |99.3%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/py3test >> test.py::test[solomon-Subquery-default.txt] [GOOD] >> test.py::test[solomon-UnknownSetting-] |99.3%| [TM] {RESULT} ydb/tests/fq/pq_async_io/ut/unittest >> test.py::test[optimizers-unused_columns_window-] [GOOD] >> test.py::test[optimizers-yql-15210_sqlin-] |99.3%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client12-year Utf8-False] [GOOD] >> test_format_setting.py::TestS3::test_date_null[v2-date_null/as_default/test.csv] [GOOD] >> test.py::test[Uint8Serialization] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client13-year Date-False] >> test_format_setting.py::TestS3::test_date_null[v2-date_null/parse_error/test.csv] >> test.py::test[join-mergejoin_choose_primary-] [GOOD] >> test.py::test[join-mergejoin_force_no_sorted-] |99.3%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/py3test |99.3%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/py3test |99.3%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/py3test >> test.py::test[join-mergejoin_sorts_output_for_sort_nomatch-] [GOOD] >> test.py::test[join-nested_semi_join-] >> test.py::test[weak_field-weak_field-] [GOOD] >> test.py::test[weak_field-weak_field_strict-] >> test.py::test[solomon-UnknownSetting-] [GOOD] |99.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/udfs/common/knn/test/py3test >> test.py::test[Uint8Serialization] [GOOD] |99.3%| [TM] {RESULT} ydb/library/yql/udfs/common/knn/test/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] >> test.py::test[join-star_join_multi-] [GOOD] >> test.py::test[key_filter-key_double_opt_suffix-] [SKIPPED] |99.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/py3test >> 
test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] |99.4%| [TM] {RESULT} ydb/tests/functional/wardens/py3test >> test.py::test[key_filter-ranges-] >> test.py::test[join-mergejoin_unused_keys-] [GOOD] >> test.py::test[join-premap_merge_extrasort2-] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted-off] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_filter-off] >> test.py::test[join-selfjoin_on_sorted_with_filter-off] [SKIPPED] >> test.py::test[join-star_join_with_diff_complex_key-] >> test.py::test[weak_field-optimize_weak_fields_combine-] [GOOD] >> test.py::test[weak_field-weak_field_aggregation-] >> test.py::test[optimizers-yql-15210_sqlin-] [GOOD] >> test.py::test[order_by-SortByTwoFieldsDesc-] >> test_format_setting.py::TestS3::test_date_null[v2-date_null/parse_error/test.csv] [GOOD] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v1-date_null/as_default/test.csv] |99.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/py3test >> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD] >> Transfer::CreateTransfer_EnterpiseVersion >> test.py::test[join-nested_semi_join-] [GOOD] >> test.py::test[join-nopushdown_filter_with_depends_on-off] |99.4%| [TM] {RESULT} ydb/tests/sql/py3test >> test.py::test[join-nopushdown_filter_with_depends_on-off] [SKIPPED] >> test.py::test[join-premap_map_inner-off] [SKIPPED] >> test.py::test[join-premap_merge_inner-] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate] [SKIPPED] >> test.py::test[join-star_join_mirror-] |99.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/solomon/pytest >> test.py::test[solomon-UnknownSetting-] [GOOD] |99.4%| [TM] {RESULT} ydb/library/yql/tests/sql/solomon/pytest >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client13-year Date-False] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join2.test] [GOOD] >> test.py::test[join-mergejoin_force_no_sorted-] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross-] >> test.py::test[weak_field-weak_field_strict-] [GOOD] >> test.py::test[window-full/aggregations_leadlag_compact-] >> test_ydb_backup.py::TestClusterBackup::test_cluster_backup >> test.py::test[weak_field-weak_field_aggregation-] [GOOD] >> test.py::test[window-empty/aggregations-] >> test_clickbench.py::TestClickbench::test_clickbench[0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[1] >> test.py::test[order_by-SortByTwoFieldsDesc-] [GOOD] >> test.py::test[order_by-order_by_expr_mul_cols-] >> test.py::test[join-star_join_mirror-] [GOOD] >> test.py::test[join-star_join_semionly-] >> test.py::test[join-star_join_with_diff_complex_key-] [GOOD] >> test.py::test[join-yql-12022-] [SKIPPED] >> test.py::test[join-yql-14829_left-] [SKIPPED] >> test.py::test[join-yql-8125-off] [SKIPPED] >> test.py::test[join-yql_465-] >> Transfer::CreateTransfer_EnterpiseVersion [GOOD] >> Transfer::CreateTransfer_TargetNotFound >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.NO] [GOOD] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.WARNING] >> test.py::test[key_filter-ranges-] [GOOD] >> test.py::test[like-like_clause_escape-default.txt] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v1-date_null/as_default/test.csv] [GOOD] >> Transfer::CreateTransfer_TargetNotFound [GOOD] >> Replication::Types >> 
test_format_setting.py::TestS3::test_date_null_with_not_null_type[v1-date_null/parse_error/test.csv] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.WARNING] [GOOD] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.ERROR] >> test.py::test[join-mergejoin_saves_output_sort_cross-] [GOOD] >> test.py::test[join-premap_common_inner-] [SKIPPED] >> test.py::test[join-premap_map_cross-off] [SKIPPED] >> test.py::test[join-pullup_random-off] [SKIPPED] >> test.py::test[join-yql_465-off] [SKIPPED] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.ERROR] [GOOD] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.NO] >> test_clickbench.py::TestClickbench::test_clickbench[1] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[2] >> test.py::test[window-full/aggregations_leadlag_compact-] [GOOD] >> test.py::test[window-full/session_aliases_compact-] >> test.py::test[order_by-order_by_expr_mul_cols-] [GOOD] >> test.py::test[order_by-sort-] >> test_db_counters.py::TestStorageCounters::test_storage_counters[enable_separate_quotas] [GOOD] >> test.py::test[join-star_join_semionly-] [GOOD] >> test.py::test[join-yql-8980-] >> test.py::test[like-like_clause_escape-default.txt] [GOOD] >> test.py::test[like-like_clause_no_pattern-default.txt] >> test.py::test[window-empty/aggregations-] [GOOD] >> test.py::test[window-win_func_order_by_udf_empty_rank-] >> test.py::test[join-yql_465-] [GOOD] >> test.py::test[key_filter-yql-19420-] [SKIPPED] >> test.py::test[lineage-nested_lambda_fields-default.txt] [SKIPPED] >> test.py::test[multicluster-partition_by_key_force-] [SKIPPED] >> test.py::test[optimizers-nonselected_direct_row-] [SKIPPED] >> test.py::test[optimizers-yql-7324_duplicate_arg-] >> ParseOptionsTest::EndpointAndDatabaseFromCommandLine >> ParseOptionsTest::EndpointAndDatabaseFromCommandLine [GOOD] >> ParseOptionsTest::NoDiscoveryCommandLine >> test_clickbench.py::TestClickbench::test_clickbench[2] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[3] >> ParseOptionsTest::NoDiscoveryCommandLine [GOOD] >> ParseOptionsTest::EndpointAndDatabaseFromActiveProfile [GOOD] >> ParseOptionsTest::EndpointAndDatabaseFromExplicitProfile >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] >> ParseOptionsTest::EndpointAndDatabaseFromExplicitProfile [GOOD] >> ParseOptionsTest::IamToken >> test.py::test[order_by-sort-] [GOOD] >> test.py::test[pg-select_columnref1-default.txt] [SKIPPED] >> test.py::test[pg-select_table2-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q22-default.txt] >> ParseOptionsTest::IamToken [GOOD] >> ParseOptionsTest::YdbToken >> test.py::test[pg-tpcds-q22-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q47-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q50-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q76-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q79-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q02-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q05-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q15-default.txt] [SKIPPED] >> test.py::test[pg_catalog-lambda-] [SKIPPED] >> test.py::test[produce-process_multi_out-] [SKIPPED] >> test.py::test[produce-process_pure_with_sort-default.txt] [SKIPPED] >> test.py::test[produce-process_streaming_count-default.txt] >> 
test_format_setting.py::TestS3::test_date_null_with_not_null_type[v1-date_null/parse_error/test.csv] [GOOD] >> test.py::test[window-full/session_aliases_compact-] [GOOD] >> test.py::test[window-row_number_no_part_multi_input-default.txt] [SKIPPED] >> test.py::test[window-udaf_window-] [SKIPPED] >> test.py::test[window-win_by_all_percentile_interval-default.txt] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v2-date_null/as_default/test.csv] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt] [GOOD] >> test.py::test[lineage-window_member_struct-default.txt] [SKIPPED] >> test.py::test[optimizers-aggregate_over_aggregate-] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/solomon/actors/ut/unittest >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] Test command err: 2025-05-05T03:25:44.716526Z node 1 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-05-05T03:25:44.716625Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-05-05T03:25:44.716658Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-05-05T03:25:44.716697Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-05-05T03:25:44.716700Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T03:25:44.721169Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Mon, 05 May 2025 03:25:44 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-05-05T03:25:44.721224Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T03:25:54.864846Z node 2 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-05-05T03:25:54.871861Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 7500 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-05-05T03:25:54.875653Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2025-05-05T03:25:54.878591Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-05-05T03:25:54.881456Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-05-05T03:25:54.884471Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-05-05T03:25:54.887591Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-05-05T03:25:54.890407Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-05-05T03:25:54.893283Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-05-05T03:25:54.894669Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 54513 bytes of data to buffer 2025-05-05T03:25:54.894840Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2025-05-05T03:25:54.895016Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-05-05T03:25:54.895113Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Sent 1000 metrics with size of 109013 bytes to solomon 2025-05-05T03:25:54.895116Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-05-05T03:25:54.994471Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 03:25:54 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T03:25:54.994581Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-05-05T03:25:54.994585Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-05-05T03:25:55.030489Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 03:25:55 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T03:25:55.030600Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-05-05T03:25:55.030605Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-05-05T03:25:55.066595Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 03:25:55 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T03:25:55.066694Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-05-05T03:25:55.066698Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-05-05T03:25:55.162492Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[5]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 03:25:55 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T03:25:55.162637Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-05-05T03:25:55.162643Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-05-05T03:25:55.182491Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[4]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 03:25:55 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T03:25:55.182607Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 500 metrics with size of 54513 bytes to solomon 2025-05-05T03:25:55.182611Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer MaxRequestsInflight 2025-05-05T03:25:55.210930Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[3]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 03:25:55 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T03:25:55.210966Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T03:25:55.225968Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Solomon response[7]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Mon, 05 May 2025 03:25:55 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 500} 2025-05-05T03:25:55.226002Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T03:25:55.297772Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[6]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 03:25:55 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T03:25:55.297815Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T03:26:05.646322Z node 3 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-05-05T03:26:05.646393Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 10 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-05-05T03:26:05.646440Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 579 bytes of data to buffer 2025-05-05T03:26:05.646468Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 10 metrics with size of 579 bytes to solomon 2025-05-05T03:26:05.646471Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T03:26:05.651710Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 27 Date: Mon, 05 May 2025 03:26:05 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 10} 2025-05-05T03:26:05.651738Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T03:26:15.752510Z node 4 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-05-05T03:26:15.753759Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 2400 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-05-05T03:26:15.757009Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2025-05-05T03:26:15.759766Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-05-05T03:26:15.760884Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 43613 bytes of data to buffer 2025-05-05T03:26:15.761067Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2025-05-05T03:26:15.761111Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-05-05T03:26:15.761140Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 400 metrics with size of 43613 bytes to solomon 2025-05-05T03:26:15.761143Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-05-05T03:26:15.777198Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Mon, 05 May 2025 03:26:15 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 400} 2025-05-05T03:26:15.777233Z node 4 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 2 2025-05-05T03:26:15.888571Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 03:26:15 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T03:26:15.888608Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: CheckpointInProgress Empty buffer 2025-05-05T03:26:15.909021Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 03:26:15 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T03:26:15.909072Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T03:26:16.194250Z node 5 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-05-05T03:26:16.194382Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-05-05T03:26:16.194417Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-05-05T03:26:16.194450Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-05-05T03:26:16.194459Z node 5 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 1 2025-05-05T03:26:16.197976Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Mon, 05 May 2025 03:26:16 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-05-05T03:26:16.198014Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T03:26:16.198076Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-05-05T03:26:16.198108Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-05-05T03:26:16.198130Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-05-05T03:26:16.198136Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T03:26:16.201702Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Mon, 05 May 2025 03:26:16 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-05-05T03:26:16.201733Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. 
Reason: Empty buffer |99.4%| [TS] {RESULT} ydb/library/yql/providers/solomon/actors/ut/unittest >> ParseOptionsTest::YdbToken [GOOD] >> ParseOptionsTest::StaticCredentials >> test.py::test[join-yql-8980-] [GOOD] >> test.py::test[key_filter-datetime-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[3] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[4] >> test.py::test[key_filter-datetime-default.txt] [SKIPPED] >> test.py::test[key_filter-nile_pred-] >> Replication::Types [GOOD] >> Replication::PauseAndResumeReplication >> test.py::test[optimizers-yql-7324_duplicate_arg-] [GOOD] >> test.py::test[order_by-assume_cut_prefix-] [SKIPPED] >> test.py::test[order_by-literal_with_assume-] [SKIPPED] >> test.py::test[order_by-order_by_expr_with_deps-default.txt] >> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] [GOOD] >> ParseOptionsTest::StaticCredentials [GOOD] >> ParseOptionsTest::AnonymousCredentials >> test.py::test[window-win_func_order_by_udf_empty_rank-] [GOOD] >> test.py::test[like-like_clause_no_pattern-default.txt] [GOOD] >> test.py::test[lineage-process-default.txt] [SKIPPED] >> test.py::test[lineage-select_field_order_by-default.txt] [SKIPPED] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt] [SKIPPED] >> test.py::test[optimizers-sort_constraint_in_left-] >> ParseOptionsTest::AnonymousCredentials [GOOD] >> ParseOptionsTest::EnvPriority [GOOD] >> YdbDump::NotNullTypeDump >> test_clickbench.py::TestClickbench::test_clickbench[4] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[5] >> test_scheduling.py::TestSchedule::test_skip_busy[kikimr0] [SKIPPED] >> test_result_limits.py::TestResultLimits::test_many_rows >> YdbDump::NotNullTypeDump [GOOD] >> YdbTopic::SupportedCodecs_TopicCreate_DefaultValue >> YdbTopic::SupportedCodecs_TopicCreate_DefaultValue [GOOD] >> YdbTopic::SupportedCodecs_TopicCreate_UserValue |99.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part13/py3test >> test.py::test[window-win_func_order_by_udf_empty_rank-] [GOOD] >> test.py::test[produce-process_streaming_count-default.txt] [GOOD] >> test.py::test[produce-reduce_with_python_input_stream-] >> YdbTopic::SupportedCodecs_TopicCreate_UserValue [GOOD] >> YdbTopic::SupportedCodecs_TopicAlter >> test.py::test[produce-reduce_with_python_input_stream-] [SKIPPED] >> test.py::test[sampling-subquery_filter-default.txt] [SKIPPED] >> test.py::test[schema-copy-other] [SKIPPED] >> test.py::test[schema-copy-schema] [SKIPPED] >> test.py::test[schema-insert-schema] [SKIPPED] >> test.py::test[schema-select_all-row_spec_part] >> test.py::test[optimizers-aggregate_over_aggregate-] [GOOD] >> test.py::test[optimizers-remove_keep_sorted_setting-] [SKIPPED] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt] [SKIPPED] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v2-date_null/as_default/test.csv] [GOOD] >> test.py::test[order_by-order_by_expr_with_deps-default.txt] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey-] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda-] [SKIPPED] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map-] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v2-date_null/parse_error/test.csv] >> test_clickbench.py::TestClickbench::test_clickbench[5] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[6] |99.4%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/py3test >> 
YdbTopic::SupportedCodecs_TopicAlter [GOOD] >> YdbTopic::SupportedCodecs_TopicConsumerAdd_DefaultValue >> test.py::test[optimizers-sort_constraint_in_left-] [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown-] [SKIPPED] >> test.py::test[optimizers-yql-3455_filter_sorted-] >> YdbTopic::SupportedCodecs_TopicConsumerAdd_DefaultValue [GOOD] >> YdbTopic::SupportedCodecs_TopicConsumerAdd_UserValue >> test.py::test[window-win_by_all_percentile_interval-default.txt] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort-] >> test.py::test[key_filter-nile_pred-] [GOOD] >> test.py::test[like-regexp_clause-] >> test_clickbench.py::TestClickbench::test_clickbench[6] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[7] >> YdbTopic::SupportedCodecs_TopicConsumerAdd_UserValue [GOOD] >> YdbWorkloadTopic::Default_RunFull >> test.py::test[order_by-order_by_num_key_and_subkey-] [GOOD] >> test.py::test[order_by-order_by_tablerecord_column-] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v2-date_null/parse_error/test.csv] [GOOD] >> test_format_setting.py::TestS3::test_date_null_multi[v1-date_null/as_default/multi_null.csv] >> test.py::test[schema-select_all-row_spec_part] [GOOD] >> test.py::test[like-regexp_clause-] [GOOD] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt] >> test.py::test[schema-select_simple-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[7] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[8] >> test.py::test[optimizers-yql-3455_filter_sorted-] [GOOD] >> test.py::test[optimizers-yql-6038_direct_row-] [SKIPPED] >> test.py::test[order_by-extract_members_over_sort_desc-] [SKIPPED] >> test.py::test[order_by-order_by_tuple-default.txt] >> test.py::test[window-win_func_aggr_4func_sort-] [GOOD] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map-] [GOOD] >> test.py::test[order_by-native_desc_publish-] >> test_dynamic_tenants.py::test_check_access[enable_alter_database_create_hive_first--false] >> test.py::test[window-win_func_aggr_hist-] >> test.py::test[order_by-native_desc_publish-] [SKIPPED] >> test.py::test[order_by-order_by_list_of_strings-] >> test.py::test[order_by-order_by_tablerecord_column-] [GOOD] >> test.py::test[order_by-sort_simple-] >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[8] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[9] >> test_format_setting.py::TestS3::test_date_null_multi[v1-date_null/as_default/multi_null.csv] [GOOD] >> test_format_setting.py::TestS3::test_date_null_multi[v1-date_null/parse_error/multi_null.csv] >> test.py::test[schema-select_simple-default.txt] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc] >> test.py::test[order_by-sort_simple-] [GOOD] >> test.py::test[pg-tpcds-q32-default.txt] [SKIPPED] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt] [GOOD] >> test.py::test[lineage-if_struct-default.txt] [SKIPPED] >> test.py::test[optimizers-combinebykey_fields_subset_range-] [SKIPPED] >> test.py::test[optimizers-unordered_over_sort-] [SKIPPED] >> test.py::test[order_by-order_by_expr_over_sorted_table-] >> test.py::test[pg-tpcds-q48-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q69-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q71-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q95-default.txt] [SKIPPED] >> test.py::test[produce-process_with_python_stream-] [SKIPPED] >> 
test.py::test[produce-reduce_all_with_python_input_stream-dq_fail] [SKIPPED] >> test.py::test[produce-reduce_with_assume-] [SKIPPED] >> test.py::test[produce-reduce_with_presort_diff_order-] [SKIPPED] >> test.py::test[sampling-orderedjoin_right_sample-default.txt] [SKIPPED] >> test.py::test[sampling-read-] >> test.py::test[order_by-order_by_tuple-default.txt] [GOOD] >> test.py::test[order_by-ordered_fill-] [SKIPPED] >> test.py::test[order_by-sort_decimals-] >> test.py::test[sampling-read-] [SKIPPED] >> test.py::test[schema-copy-read_schema] [SKIPPED] >> test.py::test[schema-patchtype-] >> test.py::test[window-win_func_aggr_hist-] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part-] >> test.py::test[order_by-order_by_list_of_strings-] [GOOD] >> test.py::test[order_by-warn_offset_wo_sort-] [SKIPPED] >> test.py::test[pg-aggregate_combine-] [SKIPPED] >> test.py::test[pg-all_data-] [SKIPPED] >> test_clickbench.py::TestClickbench::test_clickbench[9] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[10] >> test.py::test[pg-wide_top_sort-] >> test_format_setting.py::TestS3::test_date_null_multi[v1-date_null/parse_error/multi_null.csv] [GOOD] >> test_format_setting.py::TestS3::test_date_null_multi[v2-date_null/as_default/multi_null.csv] |99.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join2.test] [GOOD] >> test.py::test[order_by-order_by_expr_over_sorted_table-] [GOOD] >> test.py::test[order_by-order_by_missing_project_column-default.txt] >> test.py::test[schema-patchtype-] [GOOD] >> test.py::test[schema-read_schema_change_other-] [SKIPPED] >> test.py::test[schema-remap_desc-] [SKIPPED] >> test.py::test[schema-select_all_inferschema-] >> test.py::test[schema-select_with_map-sorted_desc] [GOOD] >> test.py::test[select-dict_lookup-default.txt] >> test.py::test[order_by-sort_decimals-] [GOOD] >> test.py::test[pg-aggregate_minus_zero-] >> test.py::test[pg-aggregate_minus_zero-] [SKIPPED] >> test.py::test[pg-join_using_tables3-default.txt] [SKIPPED] >> test.py::test[pg-select_alias_partial-default.txt] [SKIPPED] >> test.py::test[pg-select_subquery2-default.txt] [SKIPPED] >> test.py::test[pg-table_func-default.txt] [SKIPPED] >> test.py::test[pg-wide_sort-] >> test.py::test[window-win_func_lead_lag_worm_with_part-] [GOOD] >> test.py::test[window-win_multiaggr_tuple-default.txt] >> test_format_setting.py::TestS3::test_date_null_multi[v2-date_null/as_default/multi_null.csv] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[10] [GOOD] >> test_format_setting.py::TestS3::test_date_null_multi[v2-date_null/parse_error/multi_null.csv] >> test_clickbench.py::TestClickbench::test_clickbench[11] >> Replication::PauseAndResumeReplication [GOOD] >> test.py::test[pg-wide_top_sort-] [GOOD] >> test.py::test[pg-tpcds-q03-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q13-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q85-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q12-default.txt] [SKIPPED] >> test.py::test[produce-process_row_and_columns-default.txt] [SKIPPED] >> test.py::test[produce-process_with_lambda-default.txt] >> test.py::test[select-dict_lookup-default.txt] [GOOD] >> test.py::test[select-select_concrete_detailed_columns-default.txt] >> test.py::test[schema-select_all_inferschema-] [GOOD] >> test.py::test[schema-select_field-read_schema] >> test_format_setting.py::TestS3::test_date_null_multi[v2-date_null/parse_error/multi_null.csv] [GOOD] >> 
test_format_setting.py::TestS3::test_string_not_null_multi[v1-date_null/as_default/multi_null.csv] >> test.py::test[window-win_multiaggr_tuple-default.txt] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[11] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[12] >> test_stream_query.py::TestStreamQuery::test_sql_suite[results-window.test] [GOOD] >> test.py::test[pg-wide_sort-] [GOOD] >> test.py::test[pg-tpcds-q18-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q53-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q65-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q81-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q91-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q96-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q99-default.txt] >> test_ydb_backup.py::TestClusterBackup::test_cluster_backup [GOOD] >> test.py::test[pg-tpcds-q99-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q01-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q16-default.txt] [SKIPPED] >> test.py::test[produce-process_lambda_opt_args-default.txt] [SKIPPED] >> test.py::test[produce-process_rows_sorted_desc_multi_out-] [SKIPPED] >> test.py::test[produce-process_sorted_desc_multi_out-] [SKIPPED] >> test.py::test[produce-reduce_with_flat_python_stream-] [SKIPPED] >> test.py::test[produce-reduce_with_python_having-] [SKIPPED] >> test.py::test[produce-reduce_with_python_row_repack-] [SKIPPED] >> test.py::test[ql_filter-integer_optional_null-] [SKIPPED] >> test.py::test[sampling-subquery_multiple_sample-default.txt] [SKIPPED] >> test.py::test[schema-limit_directread-] [SKIPPED] >> test.py::test[schema-select_all-row_spec_extra_sort] >> test.py::test[order_by-order_by_missing_project_column-default.txt] [GOOD] >> test.py::test[order_by-order_by_udf-] |99.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part17/py3test >> test.py::test[window-win_multiaggr_tuple-default.txt] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/replication/unittest >> Replication::PauseAndResumeReplication [GOOD] Test command err: DDL: CREATE TABLE `Table_1357650863647715341` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_1357650863647715341` WITH ( min_active_partitions = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64) |> ]; }; ; CREATE TRANSFER `Transfer_1357650863647715341` FROM `Topic_1357650863647715341` TO `Table_1357650863647715341` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:1662/?database=local' , FLUSH_INTERVAL = Interval('PT1S') , BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Scheme operation failed, status: ExecError, reason: The transfer is only available in the Enterprise version } >>>>> EXPECTED: The transfer is only available in the Enterprise version DDL: DROP TABLE `Table_1357650863647715341` DDL: DROP TOPIC `Topic_1357650863647715341` DDL: CREATE TOPIC `Topic_3851825234076277565` WITH ( min_active_partitions = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64) |> ]; }; ; CREATE TRANSFER `Transfer_3851825234076277565` FROM `Topic_3851825234076277565` TO `Table_3851825234076277565` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:1662/?database=local' , FLUSH_INTERVAL = Interval('PT1S') , BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Scheme operation failed, status: ExecError, reason: The transfer destination path '/local/Table_3851825234076277565' not found } >>>>> EXPECTED: The transfer destination path '/local/Table_3851825234076277565' not found DDL: DROP TOPIC `Topic_3851825234076277565` DDL: CREATE TABLE `SourceTable_7870298821401450319` ( Key Uint32, Key2 Uuid, v01 Uuid, v02 Uuid NOT NULL, v03 Double, PRIMARY KEY (Key, Key2) ); >>>>> Query: UPSERT INTO `SourceTable_7870298821401450319` (Key,Key2,v01,v02,v03) VALUES ( 1, CAST("00078af5-0000-0000-6c0b-040000000000" as Uuid), CAST("00078af5-0000-0000-6c0b-040000000001" as Uuid), UNWRAP(CAST("00078af5-0000-0000-6c0b-040000000002" as Uuid)), CAST("311111111113.222222223" as Double) ); DDL: CREATE ASYNC REPLICATION `Replication_7870298821401450319` FOR `SourceTable_7870298821401450319` AS `Table_7870298821401450319` WITH ( CONNECTION_STRING = 'grpc://localhost:1662/?database=local' ); >>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_7870298821401450319` ORDER BY `Key2`, `v01`, `v02`, `v03` >>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_7870298821401450319]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 Attempt=19 count=-1 >>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_7870298821401450319` ORDER BY `Key2`, `v01`, `v02`, `v03` Attempt=18 count=0 >>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_7870298821401450319` ORDER BY `Key2`, `v01`, `v02`, `v03` Attempt=17 count=1 DDL: DROP ASYNC REPLICATION `Replication_7870298821401450319`; DDL: DROP TABLE `SourceTable_7870298821401450319` DDL: CREATE TABLE `SourceTable_2645870669529739023` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ); DDL: CREATE ASYNC REPLICATION `Replication_2645870669529739023` FOR `SourceTable_2645870669529739023` AS `Table_2645870669529739023` WITH ( CONNECTION_STRING = 'grpc://localhost:1662/?database=local' ); >>>>> Query: INSERT INTO `SourceTable_2645870669529739023` (`Key`, `Message`) VALUES (1, 'Message-1'); >>>>> Query: SELECT `Message` FROM `Table_2645870669529739023` ORDER BY `Message` >>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_2645870669529739023]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 Attempt=19 count=-1 >>>>> Query: SELECT `Message` FROM `Table_2645870669529739023` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_2645870669529739023` ORDER BY `Message` Attempt=17 count=1 State: Paused DDL: ALTER ASYNC REPLICATION `Replication_2645870669529739023` SET ( STATE = "Paused" ); >>>>> Query: INSERT INTO `SourceTable_2645870669529739023` (`Key`, `Message`) VALUES (2, 'Message-2'); >>>>> Query: SELECT `Message` FROM `Table_2645870669529739023` ORDER BY `Message` Attempt=19 count=1 State: StandBy DDL: ALTER ASYNC REPLICATION `Replication_2645870669529739023` SET ( STATE = "StandBy" ); >>>>> Query: SELECT `Message` FROM `Table_2645870669529739023` ORDER BY `Message` Attempt=19 count=1 >>>>> Query: SELECT `Message` FROM `Table_2645870669529739023` ORDER BY `Message` Attempt=18 count=2 DDL: ALTER ASYNC REPLICATION `Replication_2645870669529739023` SET ( STATE = "Paused" ); DDL: ALTER ASYNC REPLICATION `Replication_2645870669529739023` SET ( STATE = "StandBy" ); DDL: DROP ASYNC REPLICATION `Replication_2645870669529739023`; DDL: DROP TABLE `SourceTable_2645870669529739023` |99.4%| [TM] {RESULT} ydb/tests/functional/replication/unittest >> test.py::test[produce-process_with_lambda-default.txt] [GOOD] >> test.py::test[produce-process_with_lambda_outstream-default.txt] >> test_format_setting.py::TestS3::test_string_not_null_multi[v1-date_null/as_default/multi_null.csv] [GOOD] >> test_format_setting.py::TestS3::test_string_not_null_multi[v1-date_null/parse_error/multi_null.csv] >> test_clickbench.py::TestClickbench::test_clickbench[12] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[13] |99.5%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/py3test >> test.py::test[schema-select_field-read_schema] [GOOD] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt] [SKIPPED] >> test.py::test[select-append_to_value_1000-] >> test_clickbench.py::TestClickbench::test_clickbench[13] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[14] >> test.py::test[select-select_concrete_detailed_columns-default.txt] [GOOD] >> test.py::test[select-table_content_from_sort_desc-default.txt] [SKIPPED] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_fail-] [SKIPPED] >> test.py::test[table_range-concat_empty_sorted_with_key_diff-] [SKIPPED] >> test.py::test[table_range-concat_sorted_max_tables-] [SKIPPED] >> test.py::test[type_v3-bare_yson-] [SKIPPED] >> test.py::test[weak_field-weak_field_in_group_by-] >> test.py::test[produce-process_with_lambda_outstream-default.txt] [GOOD] >> test.py::test[produce-reduce_multi_in-empty] [SKIPPED] >> test.py::test[ql_filter-integer_many_left-] [SKIPPED] >> test.py::test[sampling-bind_expr_udf-] |99.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/py3test >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] |99.5%| [TM] {RESULT} ydb/tests/functional/encryption/py3test >> test_format_setting.py::TestS3::test_string_not_null_multi[v1-date_null/parse_error/multi_null.csv] [GOOD] >> test_format_setting.py::TestS3::test_string_not_null_multi[v2-date_null/as_default/multi_null.csv] >> test.py::test[order_by-order_by_udf-] [GOOD] >> test.py::test[pg-tpcds-q06-default.txt] 
[SKIPPED] >> test.py::test[pg-tpcds-q09-default.txt] >> test.py::test[pg-tpcds-q09-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q87-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q97-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q07-default.txt] >> test.py::test[schema-select_all-row_spec_extra_sort] [GOOD] >> test.py::test[schema-user_schema_missing_column-] >> test.py::test[pg-tpch-q07-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q17-default.txt] [SKIPPED] >> test.py::test[pg_duplicated-duplicated_rowspec-] [SKIPPED] >> test.py::test[pragma-config_exec-] [SKIPPED] >> test.py::test[produce-reduce_all_with_python_input_stream-] [SKIPPED] >> test.py::test[ql_filter-integer_eval-] >> test_clickbench.py::TestClickbench::test_clickbench[14] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[15] >> test.py::test[ql_filter-integer_eval-] [SKIPPED] >> test.py::test[ql_filter-integer_members-] [SKIPPED] >> test.py::test[sampling-bind_small_rate-default.txt] [SKIPPED] >> test.py::test[schema-def_values_job-] >> test.py::test[select-append_to_value_1000-] [GOOD] >> test.py::test[select-boolean_where-] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client13-year Date-False] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000789/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_explicit_partitioning_0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000789/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_explicit_partitioning_0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1389915) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1392283 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[sampling-bind_expr_udf-] [GOOD] >> test.py::test[sampling-sort-default.txt] [SKIPPED] >> test.py::test[sampling-system_sampling-io_block_size] [SKIPPED] >> test.py::test[sampling-topsort-default.txt] >> test.py::test[sampling-topsort-default.txt] [SKIPPED] >> test.py::test[schema-insert-row_spec] [SKIPPED] >> test.py::test[schema-insert_sorted-schema] [SKIPPED] >> test.py::test[schema-select_all-row_spec_diff_sort2] >> test_clickbench.py::TestClickbench::test_clickbench[15] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[16] >> test.py::test[weak_field-weak_field_in_group_by-] [GOOD] >> test.py::test[weak_field-weak_field_num_access-] >> test.py::test[schema-user_schema_missing_column-] [GOOD] >> test.py::test[select-scalar_subquery_with_star-default.txt] |99.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.5%| [TM] {RESULT} ydb/tests/stress/oltp_workload/tests/py3test >> test_dynamic_tenants.py::test_check_access[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_check_access[enable_alter_database_create_hive_first--true] >> test_workload.py::TestYdbWorkload::test[row] [GOOD] >> test.py::test[select-boolean_where-] [GOOD] >> test.py::test[select-corr_name_in_select-default.txt] >> test_format_setting.py::TestS3::test_string_not_null_multi[v2-date_null/as_default/multi_null.csv] [GOOD] >> test_format_setting.py::TestS3::test_string_not_null_multi[v2-date_null/parse_error/multi_null.csv] >> test_workload.py::TestYdbWorkload::test[column] >> YdbWorkloadTopic::Default_RunFull [GOOD] >> YdbWorkloadTopic::Init_Clean >> test_clickbench.py::TestClickbench::test_clickbench[16] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[17] >> test.py::test[schema-def_values_job-] [GOOD] >> test.py::test[schema-other-] [SKIPPED] >> test.py::test[schema-select_all-read_schema] >> test.py::test[weak_field-weak_field_num_access-] [GOOD] >> test.py::test[weak_field-weak_field_real_col-default.txt] >> test_format_setting.py::TestS3::test_string_not_null_multi[v2-date_null/parse_error/multi_null.csv] [GOOD] >> test.py::test[select-scalar_subquery_with_star-default.txt] [GOOD] >> test.py::test[select-trivial_having-default.txt] >> test_format_setting.py::TestS3::test_parquet_converters_to_timestamp[v1] >> YdbWorkloadTopic::Init_Clean [GOOD] >> YdbWorkloadTopic::Clean_Without_Init >> YdbWorkloadTopic::Clean_Without_Init [GOOD] >> YdbWorkloadTopic::Double_Init >> test_clickbench.py::TestClickbench::test_clickbench[17] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[18] >> test.py::test[schema-select_all-row_spec_diff_sort2] [GOOD] >> test.py::test[schema-select_field-schema] >> test.py::test[select-corr_name_in_select-default.txt] [GOOD] >> test.py::test[select-dict_lookup_by_key-default.txt] >> test.py::test[schema-select_all-read_schema] [GOOD] >> test.py::test[schema-select_field-row_spec] >> YdbWorkloadTopic::Double_Init [GOOD] >> 
YdbWorkloadTopic::Read_Statistics >> test.py::test[weak_field-weak_field_real_col-default.txt] [GOOD] >> test.py::test[window-current/session_extended-] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.NO] [GOOD] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.WARNING] >> test_clickbench.py::TestClickbench::test_clickbench[18] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[19] >> test.py::test[schema-select_field-schema] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[19] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[20] >> test.py::test[select-dict_lookup_by_key-default.txt] [GOOD] >> test.py::test[select-from_in_front-default.txt] >> test_result_limits.py::TestResultLimits::test_many_rows [GOOD] >> test.py::test[select-trivial_having-default.txt] [GOOD] >> test.py::test[select-use_cluster-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[20] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[21] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.WARNING] [GOOD] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.ERROR] >> test.py::test[schema-select_field-row_spec] [GOOD] >> test.py::test[schema-user_schema_directread-default.txt] [SKIPPED] >> test.py::test[select-literal_negative-default.txt] >> test.py::test[select-dict_with_few_keys-default.txt] [GOOD] >> test.py::test[select-literal_bool-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[21] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[22] >> test.py::test[select-from_in_front-default.txt] [GOOD] >> test.py::test[select-scalar_subquery-default.txt] >> test.py::test[window-current/session_extended-] [GOOD] >> test.py::test[window-full/noncompact_with_nulls_tuple_key-] >> test.py::test[select-literal_negative-default.txt] [GOOD] >> test.py::test[select-multi_source_issue-default.txt] >> test.py::test[select-use_cluster-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.ERROR] [GOOD] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.NO] >> test_clickbench.py::TestClickbench::test_clickbench[22] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[23] >> YdbWorkloadTopic::Read_Statistics [GOOD] >> YdbWorkloadTopic::Write_Statistics >> test.py::test[select-scalar_subquery-default.txt] [GOOD] >> test.py::test[select-select_all-default.txt] |99.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_stream_query.py::TestStreamQuery::test_sql_suite[results-window.test] [GOOD] >> test.py::test[window-full/noncompact_with_nulls_tuple_key-] [GOOD] >> test.py::test[window-full/session-] >> test_clickbench.py::TestClickbench::test_clickbench[23] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[24] >> test.py::test[select-literal_bool-default.txt] [GOOD] >> test.py::test[select-sum_to_string-default.txt] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt] >> test.py::test[select-multi_source_issue-default.txt] [GOOD] >> test.py::test[select-select_all_filtered-default.txt] >> 
test.py::test[select-select_all-default.txt] [GOOD] >> test.py::test[select-trivial_group_by-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[24] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[25] >> test.py::test[window-full/session-] [GOOD] >> test.py::test[window-lagging/aggregations_leadlag-] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt] [GOOD] >> test.py::test[table_range-concat_sorted_max_sorted_tables-] [SKIPPED] >> test.py::test[table_range-range_slash-] [SKIPPED] >> test.py::test[udf-named_args_for_script-] >> test.py::test[select-sum_to_string-default.txt] [GOOD] >> test.py::test[select-table_content_with_tmp_folder-] >> test.py::test[udf-named_args_for_script-] [SKIPPED] >> test.py::test[udf-named_args_for_script_with_posargs2-] [SKIPPED] >> test.py::test[union-union_trivial-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[25] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[26] |99.5%| [TA] $(B)/ydb/tests/functional/suite_tests/test-results/py3test/{meta.json ... results_accumulator.log} >> test_clickbench.py::TestClickbench::test_clickbench[26] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[27] >> test.py::test[window-lagging/aggregations_leadlag-] [GOOD] >> test.py::test[window-leading/aggregations_leadlag-] >> test.py::test[select-select_all_filtered-default.txt] [GOOD] >> test.py::test[select-substring-default.txt] >> test.py::test[select-trivial_group_by-default.txt] [GOOD] >> test.py::test[select-where_not_null-] >> test.py::test[select-table_content_with_tmp_folder-] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt] |99.5%| [TA] {RESULT} $(B)/ydb/tests/functional/suite_tests/test-results/py3test/{meta.json ... 
results_accumulator.log} >> YdbWorkloadTopic::Write_Statistics [GOOD] >> YdbWorkloadTopic::ReadWrite_Statistics >> test_clickbench.py::TestClickbench::test_clickbench[27] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[28] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [GOOD] >> test.py::test[union-union_trivial-default.txt] [GOOD] >> test.py::test[weak_field-weak_field_to_yson-] >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[28] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[29] >> test.py::test[select-substring-default.txt] [GOOD] >> test.py::test[select-where_cast-default.txt] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt] [SKIPPED] >> test.py::test[tpch-q19-default.txt] >> test.py::test[window-leading/aggregations_leadlag-] [GOOD] >> test.py::test[window-win_extract_members-default.txt] >> test_dynamic_tenants.py::test_check_access[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_create_and_drop_tenants[enable_alter_database_create_hive_first--false] >> test.py::test[select-where_not_null-] [GOOD] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt] >> test.py::test[weak_field-weak_field_to_yson-] [GOOD] >> test.py::test[window-full/noncompact_with_nulls-] >> test.py::test[select-where_cast-default.txt] [GOOD] >> test.py::test[table_range-range_tables_with_view-] [SKIPPED] >> test.py::test[table_range-range_with_view-] [SKIPPED] >> test.py::test[type_v3-mixed_with_columns-] >> test.py::test[tpch-q19-default.txt] [GOOD] >> test.py::test[udf-python_script-] [SKIPPED] >> test.py::test[udf-regexp_udf-] [SKIPPED] >> test.py::test[weak_field-optimize_weak_fields_filter_combine-] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt] >> test.py::test[window-win_extract_members-default.txt] [GOOD] >> test.py::test[window-win_func_lead_lag_opt-] >> YdbWorkloadTopic::ReadWrite_Statistics [GOOD] >> YdbWorkloadTopic::Write_Statistics_UseTx >> test_clickbench.py::TestClickbench::test_clickbench[29] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[30] >> test.py::test[window-full/noncompact_with_nulls-] [GOOD] >> test.py::test[window-mixed/aggregations-] >> test.py::test[type_v3-mixed_with_columns-] [GOOD] >> test.py::test[type_v3-non_strict-] [SKIPPED] >> test.py::test[view-file_outer_library-] [SKIPPED] >> test.py::test[view-system_udf-] >> test.py::test[weak_field-optimize_weak_fields_filter_combine-] [GOOD] >> test.py::test[window-current/ansi_current_with_win-] >> test_clickbench.py::TestClickbench::test_clickbench[30] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[31] >> test.py::test[view-system_udf-] [GOOD] >> test.py::test[view-view_with_lambda_process-] >> test.py::test[simple_columns-simple_columns_subreq-default.txt] [GOOD] >> test.py::test[tpch-q11-default.txt] >> test.py::test[window-mixed/aggregations-] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part-] >> test.py::test[window-win_func_lead_lag_opt-] [GOOD] >> test.py::test[ypath-empty_range-] [SKIPPED] >> test_clickbench.py::TestClickbench::test_clickbench[31] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[32] |99.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} 
ydb/tests/fq/yt/kqp_yt_file/part8/py3test >> test.py::test[ypath-empty_range-] [SKIPPED] >> test.py::test[window-current/ansi_current_with_win-] [GOOD] >> test.py::test[window-generic/aggregations_mixed_leadlag-] >> test.py::test[view-view_with_lambda_process-] [GOOD] >> test.py::test[weak_field-weak_field_esc_yson-] |99.5%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/py3test >> KqpFederatedQuery::ExecuteScriptWithThinFile [GOOD] >> KqpFederatedQuery::CreateTableAsSelectFromExternalDataSourceGenericQuery >> test_clickbench.py::TestClickbench::test_clickbench[32] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[33] >> test_ydb_backup.py::TestDatabaseBackup::test_database_backup |99.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/py3test >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] |99.6%| [TM] {RESULT} ydb/tests/stress/kv/tests/py3test >> test.py::test[window-generic/aggregations_mixed_leadlag-] [GOOD] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt] >> test.py::test[tpch-q11-default.txt] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm-] [SKIPPED] >> test.py::test[union_all-path_and_record-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[33] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[34] >> KqpFederatedQuery::CreateTableAsSelectFromExternalDataSourceGenericQuery [GOOD] >> KqpFederatedQuery::CreateTableAsSelectFromExternalDataSourceGenericScript >> test.py::test[window-win_func_aggr_4func_no_part-] [GOOD] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[34] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[35] >> test.py::test[weak_field-weak_field_esc_yson-] [GOOD] >> test.py::test[weak_field-weak_field_rest-] >> YdbWorkloadTopic::Write_Statistics_UseTx [GOOD] >> YdbWorkloadTopic::Full_Statistics_UseTx >> test.py::test[union_all-path_and_record-default.txt] [GOOD] >> test.py::test[union_all-union_all_multiin-] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_timestamp[v1] [GOOD] >> KqpFederatedQuery::CreateTableAsSelectFromExternalDataSourceGenericScript [GOOD] >> KqpFederatedQuery::CreateTableAsSelectFromExternalTableGenericQuery >> test_format_setting.py::TestS3::test_parquet_converters_to_timestamp[v2] >> test_clickbench.py::TestClickbench::test_clickbench[35] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[36] >> test_clickbench.py::TestClickbench::test_clickbench[36] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[37] |99.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part0/py3test >> test.py::test[window-win_func_auto_arg_two_sort-default.txt] [GOOD] |99.6%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/py3test >> test_clickbench.py::TestClickbench::test_clickbench[37] [GOOD] >> test.py::test[weak_field-weak_field_rest-] [GOOD] >> test.py::test[window-empty/aggregations_leadlag-] >> test_clickbench.py::TestClickbench::test_clickbench[38] >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt] [GOOD] >> test.py::test[window-win_inline_spec-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[38] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[39] >> KqpFederatedQuery::CreateTableAsSelectFromExternalTableGenericQuery [GOOD] 
>> KqpFederatedQuery::CreateTableAsSelectFromExternalTableGenericScript >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.NO] [GOOD] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.WARNING] >> test_clickbench.py::TestClickbench::test_clickbench[39] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[40] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.WARNING] [GOOD] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.ERROR] >> test.py::test[union_all-union_all_multiin-] [GOOD] >> test.py::test[union_all-union_all_subexpr-default.txt] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.ERROR] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[40] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[41] >> test.py::test[window-empty/aggregations_leadlag-] [GOOD] >> test.py::test[window-full/noncompact_with_tablerow-] >> test_clickbench.py::TestClickbench::test_clickbench[41] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[42] |99.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/py3test >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.ERROR] [GOOD] >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] >> KqpFederatedQuery::CreateTableAsSelectFromExternalTableGenericScript [GOOD] >> KqpFederatedQuery::OverridePlannerDefaults >> test.py::test[window-win_inline_spec-default.txt] [GOOD] >> YdbWorkloadTopic::Full_Statistics_UseTx [GOOD] >> YdbWorkloadTopic::WriteInTx >> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD] |99.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part6/py3test >> test.py::test[window-win_inline_spec-default.txt] [GOOD] |99.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/py3test >> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD] >> KqpFederatedQuery::OverridePlannerDefaults [GOOD] >> KqpFederatedQuery::TestReadEmptyFileWithCsvFormat >> ConsistentIndexRead::InteractiveTx [GOOD] >> KqpExtTest::SecondaryIndexSelectUsingScripting |99.6%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/py3test >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] >> test.py::test[window-full/noncompact_with_tablerow-] [GOOD] >> test.py::test[window-full/session_incompat_sort-] >> test.py::test[union_all-union_all_subexpr-default.txt] [GOOD] >> test.py::test[union_all-union_all_with_limits-default.txt] >> KqpFederatedQuery::TestReadEmptyFileWithCsvFormat [GOOD] >> KqpFederatedQuery::TestWildcardValidation >> KqpFederatedQuery::TestWildcardValidation [GOOD] >> KqpFederatedQuery::TestSecretsExistingValidation |99.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/kqp/kqp_indexes/unittest >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] >> test.py::test[window-full/session_incompat_sort-] [GOOD] >> test.py::test[window-win_multiaggr-default.txt] |99.6%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_indexes/unittest >> test.py::test[union_all-union_all_with_limits-default.txt] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map_combine-] >> test.py::TestSqsSplitMergeFifoTables::test_fifo_merge_split [GOOD] >> 
KqpFederatedQuery::TestSecretsExistingValidation [GOOD] >> KqpFederatedQuery::TestOlapToS3Insert >> test.py::test[window-win_multiaggr-default.txt] [GOOD] >> test.py::test[ypath-multi_range-default.txt] >> test.py::test[weak_field-optimize_weak_fields_map_combine-] [GOOD] >> test.py::test[window-generic/aggregations_mixed-] >> test.py::test[ypath-multi_range-default.txt] [GOOD] >> KqpFederatedQuery::TestOlapToS3Insert [GOOD] >> KqpFederatedQuery::TestReadLargeParquetFile |99.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part19/py3test >> test.py::test[ypath-multi_range-default.txt] [GOOD] >> test_ydb_backup.py::TestDatabaseBackup::test_database_backup [GOOD] |99.6%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/py3test >> test_workload.py::TestYdbLogWorkload::test[row] [GOOD] >> test_workload.py::TestYdbLogWorkload::test[column] >> test.py::test[window-generic/aggregations_mixed-] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort_desc-] >> test.py::test[window-win_func_aggr_4func_sort_desc-] [GOOD] >> test.py::test[window-win_func_part_by_expr_new-default.txt] >> KqpFederatedQuery::TestReadLargeParquetFile [GOOD] >> KqpFederatedQuery::TestLocalReadLargeParquetFile >> test.py::test[window-win_func_part_by_expr_new-default.txt] [GOOD] >> test.py::test[window-win_func_rank_by_part-] >> YdbWorkloadTopic::WriteInTx [GOOD] >> YdbWorkloadTopic::WriteProducesToAllPartitionsEvenly >> test_format_setting.py::TestS3::test_parquet_converters_to_timestamp[v2] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_datetime[v1] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test.py::test[window-win_func_rank_by_part-] [GOOD] >> test.py::test[window-win_func_special-] >> KqpFederatedQuery::TestLocalReadLargeParquetFile [GOOD] >> KqpFederatedSchemeTest::ExternalTableDdl >> KqpFederatedSchemeTest::ExternalTableDdl [GOOD] >> KqpFederatedSchemeTest::InvalidDropForExternalTableWithAuth >> test.py::test[window-win_func_special-] [GOOD] >> test.py::test[window-win_over_few_partitions_other-] >> KqpFederatedSchemeTest::InvalidDropForExternalTableWithAuth [GOOD] >> KqpFederatedSchemeTest::ExternalTableDdlLocationValidation >> test_log_scenario.py::TestLogScenario::test[180] [GOOD] >> test_log_scenario.py::TestLogScenario::test[1051200] >> KqpFederatedSchemeTest::ExternalTableDdlLocationValidation [GOOD] >> KqpS3PlanTest::S3Source >> test.py::test[window-win_over_few_partitions_other-] [GOOD] >> test.py::test[ypath-complex-default.txt] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] [GOOD] Test command err: contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead. 
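Editorial note on the DeprecationWarning above: it refers to Python's generator throw() API. CPython (3.12, which this log shows in use) deprecated the three-argument form gen.throw(type, value, traceback) in favor of passing a single exception instance. A minimal sketch of the migration, illustrative only and not part of the captured log; the worker() generator below is a made-up example, not tornado code:

    def worker():
        try:
            yield "ready"
        except ValueError as exc:
            print("generator caught:", exc)

    g = worker()
    next(g)  # advance the generator to its first yield

    # Deprecated three-argument signature (the form the tornado-4 gen.py
    # referenced above still uses); on Python 3.12 it emits the warning:
    # g.throw(ValueError, ValueError("boom"), None)

    # Preferred single-argument form: pass the exception instance directly.
    try:
        g.throw(ValueError("boom"))
    except StopIteration:
        pass  # the generator finished after handling the exception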
>> KqpS3PlanTest::S3Source [GOOD] >> KqpS3PlanTest::S3Sink >> KqpS3PlanTest::S3Sink [GOOD] >> KqpS3PlanTest::S3CreateTableAsSelect |99.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/py3test >> test.py::TestSqsSplitMergeFifoTables::test_fifo_merge_split [GOOD] |99.7%| [TM] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/py3test >> test_ydb_backup.py::TestClusterBackupRestore::test_cluster_backup_restore >> KqpS3PlanTest::S3CreateTableAsSelect [GOOD] >> KqpS3PlanTest::S3Insert >> test.py::test[ypath-complex-default.txt] [GOOD] |99.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part16/py3test >> test.py::test[ypath-complex-default.txt] [GOOD] >> KqpS3PlanTest::S3Insert [GOOD] |99.7%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/s3/unittest >> KqpS3PlanTest::S3Insert [GOOD] Test command err: Trying to start YDB, gRPC: 23475, MsgBus: 5373 2025-05-05T03:23:53.849749Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500797169978898663:2218];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000567/r3tmp/tmpZejhYN/pdisk_1.dat 2025-05-05T03:23:53.931600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T03:23:53.960884Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23475, node 1 2025-05-05T03:23:53.972422Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T03:23:53.972434Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T03:23:53.972436Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T03:23:53.972483Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5373 2025-05-05T03:23:54.024087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:54.024117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:23:54.025390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-05T03:23:54.067138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:23:54.071564Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-05-05T03:23:54.266492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500797174273866412:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:23:54.266533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:23:54.335703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-05T03:23:54.337775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-05T03:23:54.352736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480 2025-05-05T03:23:54.353931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-05-05T03:23:54.354470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-05-05T03:23:54.540096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500797174273866704:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:23:54.540127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:23:54.540266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500797174273866709:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:23:54.541133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:2, at schemeshard: 72057594046644480 2025-05-05T03:23:54.542956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500797174273866711:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-05-05T03:23:54.627425Z node 1 :TX_PROXY ERROR: Actor# [1:7500797174273866751:2518] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-05T03:23:55.064584Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746415435107, txId: 281474976710683] shutting down Trying to start YDB, gRPC: 6491, MsgBus: 5951 2025-05-05T03:23:55.469624Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500797181306398878:2205];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:23:55.497398Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000567/r3tmp/tmpQFAYVh/pdisk_1.dat 2025-05-05T03:23:55.520468Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6491, node 2 2025-05-05T03:23:55.540334Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T03:23:55.540360Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T03:23:55.540362Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T03:23:55.540409Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5951 TClient is connected to server localhost:5951 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-05-05T03:23:55.598590Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:23:55.598625Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:23:55.599824Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-05T03:23:55.604035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T03:23:55.616286Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T03:23:56.016514Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7500797185601366664:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:23:56.016537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:23:56.018386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-05T03:23:56.020238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T03:23:56.031845Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7500797185601366714:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:23:56.031890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:23:56.032187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7500797185601366719:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:23:56.033191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part propose ... _TX WARN: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.500569Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.500860Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.501028Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.501411Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.501536Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.501959Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.502070Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.502554Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.502622Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.503278Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.503325Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.503963Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.504242Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.504487Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.505009Z node 53 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.505078Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.505827Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.505884Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.506687Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.506759Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.507513Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.507543Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.508223Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.508374Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.508829Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.509167Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T03:27:35.509490Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olap_source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["data (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/olap_source","E-Rows":"No estimate","Table":"olap_source","ReadColumns":["data"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node 
Type":"TableFullScan","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":20,"Max":20,"Min":20},"FirstMessageMs":{"Count":1,"Sum":20,"Max":20,"Min":20},"Bytes":{"Count":1,"Sum":15,"Max":15,"Min":15}},"Name":"3","Push":{"LastMessageMs":{"Count":1,"Sum":20,"Max":20,"Min":20},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":20,"Max":20,"Min":20},"FirstMessageMs":{"Count":1,"Sum":20,"Max":20,"Min":20},"PauseMessageMs":{"Count":1,"Sum":16,"Max":16,"Min":16},"WaitTimeUs":{"Count":42,"Sum":608941,"Max":18154,"Min":8155},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":20,"Min":16}}}],"MaxMemoryUsage":{"Count":42,"Sum":44040192,"Max":1048576,"Min":1048576},"IngressBytes":{"Count":1,"Sum":21,"Max":21,"Min":21},"Tasks":42,"OutputRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"FinishedTasks":42,"IngressRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/olap_source","ReadRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"ReadBytes":{"Count":1,"Sum":21,"Max":21,"Min":21}}],"BaseTimeMs":1746415655547,"OutputBytes":{"Count":1,"Sum":15,"Max":15,"Min":15},"CpuTimeUs":{"Count":42,"Sum":2927,"Max":189,"Min":50},"Ingress":[{"Pop":{},"External":{"PartitionCount":1,"LastMessageMs":{"Count":1,"Sum":20,"Max":20,"Min":20},"ExternalRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"ExternalBytes":{"Count":1,"Sum":21,"Max":21,"Min":21},"FirstMessageMs":{"Count":1,"Sum":20,"Max":20,"Min":20}},"Name":"CS","Ingress":{"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":20,"Max":20,"Min":20},"FirstMessageMs":{"Count":1,"Sum":20,"Max":20,"Min":20},"Bytes":{"Count":1,"Sum":21,"Max":21,"Min":21}},"Push":{}}],"UpdateTimeMs":21}}],"Node Type":"Map","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[],"ExternalDataSource":"insert_data_sink","Extension":".parquet","Name":"Write insert_data_sink","SinkType":"s3"}],"Node 
Type":"Stage-Sink","Stats":{"UseLlvm":"undefined","MaxMemoryUsage":{"Count":42,"Sum":44040192,"Max":1048576,"Min":1048576},"DurationUs":{"Count":1,"Sum":7000,"Max":7000,"Min":7000},"InputBytes":{"Count":1,"Sum":15,"Max":15,"Min":15},"Tasks":42,"FinishedTasks":42,"Egress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":21,"Max":21,"Min":21},"FirstMessageMs":{"Count":1,"Sum":21,"Max":21,"Min":21},"Bytes":{"Count":1,"Sum":660,"Max":660,"Min":660}},"Name":"S3Sink","Egress":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Splits":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":42,"Sum":737,"Max":27,"Min":16},"ActiveMessageMs":{"Count":42,"Max":27,"Min":16},"FirstMessageMs":{"Count":42,"Sum":731,"Max":21,"Min":16},"Bytes":{"Count":1,"Sum":328,"Max":328,"Min":328},"ActiveTimeUs":{"Count":1,"Sum":6000,"Max":6000,"Min":6000}},"Push":{"LastMessageMs":{"Count":1,"Sum":21,"Max":21,"Min":21},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":21,"Max":21,"Min":21},"FirstMessageMs":{"Count":1,"Sum":21,"Max":21,"Min":21},"Bytes":{"Count":1,"Sum":660,"Max":660,"Min":660},"PauseMessageMs":{"Count":1,"Sum":20,"Max":20,"Min":20},"WaitTimeUs":{"Count":42,"Sum":669767,"Max":20394,"Min":14372},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":21,"Min":20}}}],"InputRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"PhysicalStageId":1,"StageDurationUs":11000,"EgressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"BaseTimeMs":1746415655547,"EgressBytes":{"Count":1,"Sum":328,"Max":328,"Min":328},"CpuTimeUs":{"Count":42,"Sum":1606,"Max":304,"Min":19},"UpdateTimeMs":27,"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":20,"Max":20,"Min":20},"FirstMessageMs":{"Count":1,"Sum":20,"Max":20,"Min":20},"Bytes":{"Count":1,"Sum":15,"Max":15,"Min":15}},"Name":"1","Push":{"LastMessageMs":{"Count":1,"Sum":20,"Max":20,"Min":20},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":20,"Max":20,"Min":20},"FirstMessageMs":{"Count":1,"Sum":20,"Max":20,"Min":20},"Bytes":{"Count":1,"Sum":15,"Max":15,"Min":15},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":42,"Sum":669474,"Max":20094,"Min":14371},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":20,"Min":1}}}]}}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":21078,"CpuTimeUs":20313},"ProcessCpuTimeUs":117,"TotalDurationUs":53151,"ResourcePoolId":"default","QueuedTimeUs":159},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"ExternalDataSource":"insert_data_sink","Extension":".parquet","A-SelfCpu":0.304,"A-Cpu":0.304,"Name":"Write insert_data_sink","SinkType":"s3"}],"Node Type":"Write insert_data_sink"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query"}} |99.7%| [TM] {RESULT} ydb/core/kqp/ut/federated_query/s3/unittest >> YdbWorkloadTopic::WriteProducesToAllPartitionsEvenly [GOOD] >> YdbWorkloadTransferTopicToTable::Default_Run >> test_format_setting.py::TestS3::test_parquet_converters_to_datetime[v1] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_datetime[v2] >> 
test_tpch.py::TestTpchS1::test_tpch[1] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[2] >> test_tpch.py::TestTpchS1::test_tpch[2] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[3] >> test_dynamic_tenants.py::test_create_and_drop_tenants[enable_alter_database_create_hive_first--false] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test 2025-05-05 03:27:37,723 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-05 03:27:37,960 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1090744 167M 171M 113M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/177e/000d5c/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mod 1093100 966M 967M 703M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/177e/000d5c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk 1094447 117M 117M 89.4M └─ moto_server s3 --port 30680 Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000d5c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000d5c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File 
"contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test if not self.wait_for( File "ydb/tests/olap/ttl_tiering/base.py", line 88, in wait_for time.sleep(1) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/177e/000d5c/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/177e/000d5c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/177e/000d5c', '--source-root', '/home/runner/.ya/build/build_root/177e/000d5c/environment/arcadia', '--output-dir', 
'/home/runner/.ya/build/build_root/177e/000d5c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/177e/000d5c/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/177e/000d5c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/177e/000d5c', '--source-root', '/home/runner/.ya/build/build_root/177e/000d5c/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/177e/000d5c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) |99.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_create_and_drop_tenants[enable_alter_database_create_hive_first--false] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_datetime[v2] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_string[v1] >> test_tpch.py::TestTpchS1::test_tpch[3] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[4] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD] |99.7%| [TA] $(B)/ydb/tests/functional/tenants/test-results/py3test/{meta.json 
... results_accumulator.log} |99.7%| [TA] {RESULT} $(B)/ydb/tests/functional/tenants/test-results/py3test/{meta.json ... results_accumulator.log} >> test_format_setting.py::TestS3::test_parquet_converters_to_string[v1] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_string[v2] |99.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD] >> test_ydb_backup.py::TestClusterBackupRestore::test_cluster_backup_restore [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[4] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[5] >> YdbWorkloadTransferTopicToTable::Default_Run [GOOD] >> YdbWorkloadTransferTopicToTable::Default_Init_Clean |99.7%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} >> test_format_setting.py::TestS3::test_parquet_converters_to_string[v2] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_utf8[v1] |99.7%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} >> test_workload.py::TestYdbLogWorkload::test[column] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_utf8[v1] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_utf8[v2] >> test_format_setting.py::TestS3::test_parquet_converters_to_utf8[v2] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_date[v1] |99.8%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} >> test_format_setting.py::TestS3::test_parquet_converters_to_date[v1] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_date[v2] >> test_ydb_backup.py::TestDatabaseBackupRestore::test_database_backup_restore |99.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/log/tests/py3test >> test_workload.py::TestYdbLogWorkload::test[column] [GOOD] |99.8%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.8%| [TM] {RESULT} ydb/tests/stress/log/tests/py3test >> test_format_setting.py::TestS3::test_parquet_converters_to_date[v2] [GOOD] >> test_format_setting.py::TestS3::test_s3_push_down_parquet[v2] >> test_workload.py::TestYdbWorkload::test[column] [GOOD] >> YdbWorkloadTransferTopicToTable::Default_Init_Clean [GOOD] >> YdbWorkloadTransferTopicToTable::Specific_Init_Clean >> test_format_setting.py::TestS3::test_s3_push_down_parquet[v2] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_format_setting.py::TestS3::test_s3_push_down_parquet[v2] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000798/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_format_setting/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/000798/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_format_setting/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1388380) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1391837 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_backup.py::TestDatabaseBackupRestore::test_database_backup_restore [GOOD] |99.8%| [TA] $(B)/ydb/tests/fq/s3/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/tests/fq/s3/test-results/py3test/{meta.json ... results_accumulator.log} >> YdbWorkloadTransferTopicToTable::Specific_Init_Clean [GOOD] >> YdbWorkloadTransferTopicToTable::Clean_Without_Init >> test_tpch.py::TestTpchS1::test_tpch[5] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[6] >> test_result_limits.py::TestResultLimits::test_large_row |99.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/py3test >> test_workload.py::TestYdbWorkload::test[column] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[6] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[7] |99.8%| [TM] {RESULT} ydb/tests/stress/simple_queue/tests/py3test >> test_tpch.py::TestTpchS1::test_tpch[7] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[8] |99.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_backup.py::TestDatabaseBackupRestore::test_database_backup_restore [GOOD] >> test_result_limits.py::TestResultLimits::test_large_row [GOOD] |99.8%| [TA] $(B)/ydb/tests/functional/ydb_cli/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/ydb_cli/test-results/py3test/{meta.json ... 
results_accumulator.log} >> YdbWorkloadTransferTopicToTable::Clean_Without_Init [GOOD] >> YdbWorkloadTransferTopicToTable::Double_Init >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] >> test_workload.py::TestYdbWorkload::test [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[8] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[9] >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/py3test >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1610230) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |99.9%| [TM] {RESULT} ydb/tests/fq/mem_alloc/py3test >> test_tpch.py::TestTpchS1::test_tpch[9] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[10] >> YdbWorkloadTransferTopicToTable::Double_Init [GOOD] >> YdbWorkloadTransferTopicToTable::Statistics >> test_tpch.py::TestTpchS1::test_tpch[10] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[11] >> test_tpch.py::TestTpchS1::test_tpch[11] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[12] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[12] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[13] >> test_tpch.py::TestTpchS1::test_tpch[13] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[14] >> test_tpch.py::TestTpchS1::test_tpch[14] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[15] >> test_tpch.py::TestTpchS1::test_tpch[15] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[16] |99.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| 
[TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] >> test_tpch.py::TestTpchS1::test_tpch[16] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[17] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] >> YdbWorkloadTransferTopicToTable::Statistics [GOOD] |99.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/apps/ydb/ut/unittest >> YdbWorkloadTransferTopicToTable::Statistics [GOOD] |99.9%| [TM] {RESULT} ydb/apps/ydb/ut/unittest >> test_tpch_import.py::TestS3TpchImport::test_import_and_export [GOOD] >> S3AwsCredentials::ExecuteScriptWithEqSymbol >> test_tpch.py::TestTpchS1::test_tpch[17] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[18] >> S3AwsCredentials::ExecuteScriptWithEqSymbol [GOOD] >> S3AwsCredentials::TestInsertEscaping >> test_tpch.py::TestTpchS1::test_tpch[18] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[19] >> S3AwsCredentials::TestInsertEscaping [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[19] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[20] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/py3test >> test_tpch_import.py::TestS3TpchImport::test_import_and_export [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00085e/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/177e/00085e/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1700936 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |99.9%| [TM] {RESULT} ydb/tests/olap/s3_import/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/external_sources/s3/ut/unittest >> S3AwsCredentials::TestInsertEscaping [GOOD] Test command err: Trying to start YDB, gRPC: 65453, MsgBus: 26900 2025-05-05T03:30:20.914299Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500798835392546631:2216];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:30:20.914507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000f7b/r3tmp/tmpJKxX3O/pdisk_1.dat 2025-05-05T03:30:20.982416Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65453, node 1 2025-05-05T03:30:21.012426Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T03:30:21.012442Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T03:30:21.012445Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T03:30:21.012495Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
2025-05-05T03:30:21.019016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:30:21.019043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:30:21.020161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26900 TClient is connected to server localhost:26900 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-05T03:30:21.077885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:30:21.086069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-05T03:30:21.633094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500798839687514416:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:30:21.633114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:30:21.919153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-05-05T03:30:21.982717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500798839687514548:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:30:21.982736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500798839687514553:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:30:21.982737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:30:21.983592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:2, at schemeshard: 72057594046644480 2025-05-05T03:30:21.989741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500798839687514555:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-05T03:30:22.088049Z node 1 :TX_PROXY ERROR: Actor# [1:7500798843982481891:2395] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-05T03:30:22.221890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-05T03:30:22.258594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-05-05T03:30:22.301243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-05-05T03:30:22.346584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-05-05T03:30:22.385002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-05T03:30:22.425476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-05-05T03:30:22.439376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-05-05T03:30:22.674759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-05-05T03:30:22.675789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-05-05T03:30:22.678611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2025-05-05T03:30:22.678839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2025-05-05T03:30:22.678972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2025-05-05T03:30:22.810471Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7500798843982483348:2701], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
: Error: The specified bucket does not exist, error: code: NoSuchBucket, request id: [3de94eba-2dac07e-5c1e49d4-be3376b3] 2025-05-05T03:30:22.810756Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjVlNmI1NDUtZWFiNjlkM2QtYzY3ZjBmZTgtOTljZWZkY2M=, ActorId: [1:7500798843982483346:2700], ActorState: ExecuteState, TraceId: 01jtf7x9tnampvmgk3cebpvwne, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-05-05T03:30:23.910974Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7500798848277451104:2868], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
: Error: The specified bucket does not exist, error: code: NoSuchBucket, request id: [ad1f85a4-2877ba95-7fc16b45-78a85742] 2025-05-05T03:30:23.911025Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTdjMDM0Mi03NDM4OWQ4OC0xOTJmZmUzYy1lODlhMWJjMQ==, ActorId: [1:7500798848277451102:2867], ActorState: ExecuteState, TraceId: 01jtf7xb10earcm0twd4j9mfcf, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-05-05T03:30:25.063477Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746415825105, txId: 281474976715757] shutting down 2025-05-05T03:30:25.119535Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746415825161, txId: 281474976715769] shutting down 2025-05-05T03:30:25.139235Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7500798856867386872:3246], status: GENERIC_ERROR, issues:
: Error: Table metadata loading, code: 1050
:2:21: Error: Failed to load metadata for table: /Root/external_data_source.[/a/]
: Error: secret with name 'id' not found 2025-05-05T03:30:25.139288Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmRlYjBlYWEtOTJkMzc3MWMtYzVlYTQ5NjgtZWFlYWM3MjY=, ActorId: [1:7500798856867386866:3243], ActorState: ExecuteState, TraceId: 01jtf7xc7165xda1h3rch0ntnm, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-05-05T03:30:25.152956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715780:0, at schemeshard: 72057594046644480 2025-05-05T03:30:25.214866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715783:0, at schemeshard: 72057594046644480 2025-05-05T03:30:25.315691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715786:0, at schemeshard: 72057594046644480 2025-05-05T03:30:25.352138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715789:0, at schemeshard: 72057594046644480 2025-05-05T03:30:25.587432Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746415825630, txId: 281474976715815] shutting down 2025-05-05T03:30:25.636675Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746415825679, txId: 281474976715827] shutting down Trying to start YDB, gRPC: 7386, MsgBus: 63880 2025-05-05T03:30:25.985844Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500798853736240617:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T03:30:25.986184Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/177e/000f7b/r3tmp/tmpLo4GJ6/pdisk_1.dat 2025-05-05T03:30:25.995613Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7386, node 2 2025-05-05T03:30:26.006818Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T03:30:26.006829Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T03:30:26.006830Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T03:30:26.006849Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63880 TClient is connected to server localhost:63880 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-05T03:30:26.088850Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T03:30:26.088872Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T03:30:26.089226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T03:30:26.089865Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T03:30:26.677974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7500798858031208546:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:30:26.678000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:30:26.987556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-05-05T03:30:26.994843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7500798858031208676:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:30:26.994885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:30:26.994894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7500798858031208681:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T03:30:26.995300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-05-05T03:30:27.001853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7500798858031208683:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-05T03:30:27.075077Z node 2 :TX_PROXY ERROR: Actor# [2:7500798862326176019:2387] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-05T03:30:27.113318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-05T03:30:27.150840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480 2025-05-05T03:30:27.244306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-05-05T03:30:27.287494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-05-05T03:30:27.325831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-05-05T03:30:27.363598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-05-05T03:30:27.374260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-05-05T03:30:27.616260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715697:0, at schemeshard: 72057594046644480 2025-05-05T03:30:27.620173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-05-05T03:30:27.620524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2025-05-05T03:30:27.620955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-05-05T03:30:27.962402Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746415828010, txId: 281474976715732] shutting down 2025-05-05T03:30:27.969804Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7500798862326178333:2984], status: INTERNAL_ERROR, issues:
: Fatal: Table metadata loading, code: 1050
:2:17: Fatal: Failed to load metadata for table: /Root/external_data_source.[exp_folder/some_ !"#$%&'()+,-./0123456789:;<=>@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_abcdefghijklmnopqrstuvwxyz|~`/]
: Fatal: couldn't load table metadata: parameter is not supported with type inference: data.datetime.format, code: 1 2025-05-05T03:30:27.969857Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZmOTk3OGUtZGM0ODNiMzYtNzJiMWZkMWEtNmVhYjY1MWE=, ActorId: [2:7500798862326178331:2983], ActorState: ExecuteState, TraceId: 01jtf7xeztb6tmhmcd193py6ex, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: |99.9%| [TM] {RESULT} ydb/core/external_sources/s3/ut/unittest >> test_tpch.py::TestTpchS1::test_tpch[20] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[21] >> test_tpch.py::TestTpchS1::test_tpch[21] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[22] >> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD] 2025-05-05 03:30:57,225 WARNING libarchive: File (test_tpch.py.TestTpchS1.test_tpch.1/cluster/slot_1/logfile_ac020rvy.log) size has changed. Can't write more data than was declared in the tar header (134401545). (probably file was changed during archiving) |99.9%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log} >> test_log_scenario.py::TestLogScenario::test[1051200] [GOOD] >> zip_bomb.py::TestZipBomb::test >> zip_bomb.py::TestZipBomb::test [GOOD] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] >> alter_compression.py::TestAlterCompression::test_all_supported_compression [GOOD] >> alter_compression.py::TestAlterCompression::test_availability_data ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/py3test >> zip_bomb.py::TestZipBomb::test [GOOD] Test command err: Pid 1756044 upsert #0 ok, result: [] upsert #1 ok, result: [] Rss after upsert 634964 [{'column0': 
b'xxxxx... (TRUNCATED) ...xxxxx1', 'column1': b'xxxxx... (TRUNCATED) ...xxxxx2', 'column2': b'xxxxx... (TRUNCATED) ...xxxxx3', 'column3': b'xxxxx... (TRUNCATED) ...xxxxx4', 'column4': b'xxxxx... (TRUNCATED) ...xxxxx5'}] Max rss {} 3569200 >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD] |99.9%| [TM] {RESULT} ydb/tests/olap/py3test >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] >> alter_compression.py::TestAlterCompression::test_all_supported_compression [GOOD] >> alter_compression.py::TestAlterCompression::test_availability_data ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_availability_data [GOOD] 
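The zip_bomb scenario above prints "Rss after upsert 634964" and "Max rss {} 3569200", which presumably track the resident set size of the server process under test. A minimal sketch of how such a measurement could be taken from a Python test on Linux, assuming the target pid is already known (the read_rss_kib helper and the commented-out usage are illustrative, not part of the actual test suite):

```python
import re
from pathlib import Path


def read_rss_kib(pid: int) -> int:
    """Return the resident set size of `pid` in KiB, read from /proc/<pid>/status (Linux only)."""
    status = Path(f"/proc/{pid}/status").read_text()
    match = re.search(r"^VmRSS:\s+(\d+)\s+kB", status, re.MULTILINE)
    if match is None:
        raise RuntimeError(f"no VmRSS entry for pid {pid}")
    return int(match.group(1))


# Illustrative usage mirroring the log above (pid taken from the "Pid 1756044" line):
# before = read_rss_kib(1756044)
# ...bulk-upsert the oversized rows...
# after = read_rss_kib(1756044)
# print(f"Rss after upsert {after} KiB (delta {after - before} KiB)")
```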
|99.9%| [TM] {RESULT} ydb/tests/olap/column_family/compression/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] 2025-05-05 03:33:42,561 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-05 03:33:42,721 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1542128 1.5G 1.5G 1.4G ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/177e/0004e4/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module 1758747 741M 751M 524M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/177e/0004e4/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_sch Test command err: test_suffix, num 0, table path alter_table start_time 1746415428.3937619 Path alter_table removed Path alter_table removed test_suffix, num 0, table path alter_tablestore start_time 1746415430.454734 Path alter_tablestore removed Path alter_tablestore removed test_suffix, num 0, table path table start_time 1746415432.025797 Path table removed Path table removed test_suffix, num 0, table path tablestores start_time 1746415433.5457096 Path tablestores removed Path tablestores removed test_suffix, num 0, table path read_data_during_bulk_upsert0 start_time 1746415459.893749 test_suffix, num 1, table path read_data_during_bulk_upsert1 start_time 1746415459.8951166 test_suffix, num 2, table path read_data_during_bulk_upsert2 start_time 1746415459.895558 test_suffix, num 4, table path read_data_during_bulk_upsert4 start_time 1746415459.8998604 test_suffix, num 6, table path read_data_during_bulk_upsert6 start_time 1746415459.900502 test_suffix, num 8, table path read_data_during_bulk_upsert8 start_time 1746415459.901211 test_suffix, num 3, table path read_data_during_bulk_upsert3 start_time 1746415459.899272 test_suffix, num 5, table path read_data_during_bulk_upsert5 start_time 1746415459.9001746 test_suffix, num 9, table path read_data_during_bulk_upsert9 start_time 1746415459.901507 test_suffix, num 7, table path read_data_during_bulk_upsert7 start_time 1746415459.9008937 Path read_data_during_bulk_upsert4 removed Path read_data_during_bulk_upsert0 removed Path read_data_during_bulk_upsert6 removed Path read_data_during_bulk_upsert2 removed Path read_data_during_bulk_upsert9 removed Path read_data_during_bulk_upsert5 removed Path read_data_during_bulk_upsert7 removed Path read_data_during_bulk_upsert3 removed Path read_data_during_bulk_upsert8 removed Path read_data_during_bulk_upsert1 removed Path read_data_during_bulk_upsert9 removed Path read_data_during_bulk_upsert4 removed Path read_data_during_bulk_upsert0 removed Path read_data_during_bulk_upsert3 removed Path read_data_during_bulk_upsert8 removed Path read_data_during_bulk_upsert7 removed Path read_data_during_bulk_upsert6 removed Path read_data_during_bulk_upsert1 removed Path read_data_during_bulk_upsert5 removed Path read_data_during_bulk_upsert2 removed test_suffix, num 0, table path read_data_during_bulk_upsert start_time 1746415554.1110263 Path read_data_during_bulk_upsert removed Path read_data_during_bulk_upsert removed test_suffix, num 0, table path alter_compression start_time 1746415631.1135907 Path alter_compression removed Path alter_compression removed test_suffix, num 0, table path many_tables start_time 
1746415807.207189 Path many_tables removed contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/moto/py3/moto/s3/models.py:122: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). Path many_tables removed test_suffix, num 0, table path read_update_write_load start_time 1746415958.6574867 Path read_update_write_load removed Was written: 0.0 MiB, Speed: 0.0 MiB/s Step 1. only write Write: 10% 460 30% 460 50% 460 90% 460 99% 460 ms Write: 10% 1196 30% 1196 50% 1196 90% 1196 99% 1196 ms Write: 10% 6838 30% 6838 50% 6838 90% 6838 99% 6838 ms Write: 10% 6824 30% 6824 50% 6824 90% 6824 99% 6824 ms Write: 10% 6729 30% 6729 50% 6729 90% 6729 99% 6729 ms Write: 10% 6394 30% 6394 50% 6394 90% 6394 99% 6394 ms Write: 10% 6314 30% 6314 50% 6314 90% 6314 99% 6314 ms Write: 10% 6606 30% 6606 50% 6606 90% 6606 99% 6606 ms Write: 10% 6180 30% 6180 50% 6180 90% 6180 99% 6180 ms Write: 10% 6156 30% 6156 50% 6156 90% 6156 99% 6156 ms Write: 10% 5679 30% 5679 50% 5679 90% 5679 99% 5679 ms Write: 10% 5794 30% 5794 50% 5794 90% 5794 99% 5794 ms Write: 10% 5412 30% 5412 50% 5412 90% 5412 99% 5412 ms Write: 10% 5286 30% 5286 50% 5286 90% 5286 99% 5286 ms Write: 10% 5361 30% 5361 50% 5361 90% 5361 99% 5361 ms Write: 10% 5244 30% 5244 50% 5244 90% 5244 99% 5244 ms Write: 10% 5031 30% 5031 50% 5031 90% 5031 99% 5031 ms Write: 10% 5011 30% 5011 50% 5011 90% 5011 99% 5011 ms Write: 10% 4488 30% 4488 50% 4488 90% 4488 99% 4488 ms Write: 10% 4294 30% 4294 50% 4294 90% 4294 99% 4294 ms Write: 10% 4571 30% 4571 50% 4571 90% 4571 99% 4571 ms Write: 10% 4448 30% 4448 50% 4448 90% 4448 99% 4448 ms Write: 10% 4438 30% 4438 50% 4438 90% 4438 99% 4438 ms Write: 10% 4503 30% 4503 50% 4503 90% 4503 99% 4503 ms Write: 10% 4265 30% 4265 50% 4265 90% 4265 99% 4265 ms Write: 10% 3996 30% 3996 50% 3996 90% 3996 99% 3996 ms Write: 10% 3762 30% 3762 50% 3762 90% 3762 99% 3762 ms Write: 10% 3932 30% 3932 50% 3932 90% 3932 99% 3932 ms Write: 10% 4035 30% 4035 50% 4035 90% 4035 99% 4035 ms Write: 10% 3751 30% 3751 50% 3751 90% 3751 99% 3751 ms Write: 10% 3659 30% 3659 50% 3659 90% 3659 99% 3659 ms Write: 10% 3608 30% 3608 50% 3608 90% 3608 99% 3608 ms Write: 10% 3666 30% 3666 50% 3666 90% 3666 99% 3666 ms Write: 10% 3521 30% 3521 50% 3521 90% 3521 99% 3521 ms Write: 10% 3713 30% 3713 50% 3713 90% 3713 99% 3713 ms Write: 10% 
3273 30% 3273 50% 3273 90% 3273 99% 3273 ms Write: 10% 3402 30% 3402 50% 3402 90% 3402 99% 3402 ms Write: 10% 3821 30% 3821 50% 3821 90% 3821 99% 3821 ms Write: 10% 3275 30% 3275 50% 3275 90% 3275 99% 3275 ms Write: 10% 2955 30% 2955 50% 2955 90% 2955 99% 2955 ms Write: 10% 2848 30% 2848 50% 2848 90% 2848 99% 2848 ms Write: 10% 2844 30% 2844 50% 2844 90% 2844 99% 2844 ms Write: 10% 2832 30% 2832 50% 2832 90% 2832 99% 2832 ms Write: 10% 2708 30% 2708 50% 2708 90% 2708 99% 2708 ms Write: 10% 2456 30% 2456 50% 2456 90% 2456 99% 2456 ms Write: 10% 2097 30% 2097 50% 2097 90% 2097 99% 2097 ms Write: 10% 2860 30% 2860 50% 2860 90% 2860 99% 2860 ms Write: 10% 1698 30% 1698 50% 1698 90% 1698 99% 1698 ms Write: 10% 2796 30% 2796 50% 2796 90% 2796 99% 2796 ms Write: 10% 2890 30% 2890 50% 2890 90% 2890 99% 2890 ms Write: 10% 1938 30% 1938 50% 1938 90% 1938 99% 1938 ms Write: 10% 1701 30% 1701 50% 1701 90% 1701 99% 1701 ms Write: 10% 1565 30% 1565 50% 1565 90% 1565 99% 1565 ms Write: 10% 1927 30% 1927 50% 1927 90% 1927 99% 1927 ms Write: 10% 2290 30% 2290 50% 2290 90% 2290 99% 2290 ms Write: 10% 1651 30% 1651 50% 1651 90% 1651 99% 1651 ms Write: 10% 1892 30% 1892 50% 1892 90% 1892 99% 1892 ms Write: 10% 1418 30% 1418 50% 1418 90% 1418 99% 1418 ms Write: 10% 1243 30% 1243 50% 1243 90% 1243 99% 1243 ms Write: 10% 1573 30% 1573 50% 1573 90% 1573 99% 1573 ms Write: 10% 1512 30% 1512 50% 1512 90% 1512 99% 1512 ms Write: 10% 1680 30% 1680 50% 1680 90% 1680 99% 1680 ms Write: 10% 1393 30% 1393 50% 1393 90% 1393 99% 1393 ms Write: 10% 1326 30% 1326 50% 1326 90% 1326 99% 1326 ms Step 2. read write Write: 10% 6858 30% 6858 50% 6858 90% 6858 99% 6858 ms Write: 10% 7113 30% 7113 50% 7113 90% 7113 99% 7113 ms Write: 10% 7263 30% 7263 50% 7263 90% 7263 99% 7263 ms Write: 10% 6519 30% 6519 50% 6519 90% 6519 99% 6519 ms Write: 10% 6502 30% 6502 50% 6502 90% 6502 99% 6502 ms Write: 10% 6549 30% 6549 50% 6549 90% 6549 99% 6549 ms Write: 10% 6247 30% 6247 50% 6247 90% 6247 99% 6247 ms Write: 10% 6232 30% 6232 50% 6232 90% 6232 99% 6232 ms Write: 10% 6156 30% 6156 50% 6156 90% 6156 99% 6156 ms Write: 10% 6208 30% 6208 50% 6208 90% 6208 99% 6208 ms Write: 10% 6077 30% 6077 50% 6077 90% 6077 99% 6077 ms Write: 10% 5235 30% 5235 50% 5235 90% 5235 99% 5235 ms Write: 10% 5910 30% 5910 50% 5910 90% 5910 99% 5910 ms Write: 10% 5808 30% 5808 50% 5808 90% 5808 99% 5808 ms Write: 10% 5974 30% 5974 50% 5974 90% 5974 99% 5974 ms Write: 10% 5310 30% 5310 50% 5310 90% 5310 99% 5310 ms Write: 10% 5176 30% 5176 50% 5176 90% 5176 99% 5176 ms Write: 10% 4971 30% 4971 50% 4971 90% 4971 99% 4971 ms Write: 10% 5009 30% 5009 50% 5009 90% 5009 99% 5009 ms Write: 10% 4245 30% 4245 50% 4245 90% 4245 99% 4245 ms Write: 10% 3590 30% 3590 50% 3590 90% 3590 99% 3590 ms Write: 10% 4931 30% 4931 50% 4931 90% 4931 99% 4931 ms Write: 10% 4314 30% 4314 50% 4314 90% 4314 99% 4314 ms Write: 10% 3960 30% 3960 50% 3960 90% 3960 99% 3960 ms Write: 10% 3540 30% 3540 50% 3540 90% 3540 99% 3540 ms Write: 10% 4555 30% 4555 50% 4555 90% 4555 99% 4555 ms Write: 10% 3461 30% 3461 50% 3461 90% 3461 99% 3461 ms Write: 10% 3931 30% 3931 50% 3931 90% 3931 99% 3931 ms Write: 10% 3570 30% 3570 50% 3570 90% 3570 99% 3570 ms Write: 10% 4183 30% 4183 50% 4183 90% 4183 99% 4183 ms Write: 10% 3456 30% 3456 50% 3456 90% 3456 99% 3456 ms Write: 10% 3484 30% 3484 50% 3484 90% 3484 99% 3484 ms Write: 10% 3167 30% 3167 50% 3167 90% 3167 99% 3167 ms Write: 10% 3026 30% 3026 50% 3026 90% 3026 99% 3026 ms Write: 10% 3241 30% 3241 50% 3241 90% 3241 99% 3241 ms Write: 10% 3329 30% 3329 
50% 3329 90% 3329 99% 3329 ms Write: 10% 2787 30% 2787 50% 2787 90% 2787 99% 2787 ms Write: 10% 2896 30% 2896 50% 2896 90% 2896 99% 2896 ms Write: 10% 2367 30% 2367 50% 2367 90% 2367 99% 2367 ms Write: 10% 2227 30% 2227 50% 2227 90% 2227 99% 2227 ms Write: 10% 2265 30% 2265 50% 2265 90% 2265 99% 2265 ms Write: 10% 2445 30% 2445 50% 2445 90% 2445 99% 2445 ms Write: 10% 2210 30% 2210 50% 2210 90% 2210 99% 2210 ms Write: 10% 2519 30% 2519 50% 2519 90% 2519 99% 2519 ms Write: 10% 1972 30% 1972 50% 1972 90% 1972 99% 1972 ms Write: 10% 2108 30% 2108 50% 2108 90% 2108 99% 2108 ms Write: 10% 2562 30% 2562 50% 2562 90% 2562 99% 2562 ms Write: 10% 1740 30% 1740 50% 1740 90% 1740 99% 1740 ms Write: 10% 3146 30% 3146 50% 3146 90% 3146 99% 3146 ms Write: 10% 1539 30% 1539 50% 1539 90% 1539 99% 1539 ms Write: 10% 2032 30% 2032 50% 2032 90% 2032 99% 2032 ms Write: 10% 2109 30% 2109 50% 2109 90% 2109 99% 2109 ms Write: 10% 1717 30% 1717 50% 1717 90% 1717 99% 1717 ms Write: 10% 2188 30% 2188 50% 2188 90% 2188 99% 2188 ms Write: 10% 1400 30% 1400 50% 1400 90% 1400 99% 1400 ms Write: 10% 1472 30% 1472 50% 1472 90% 1472 99% 1472 ms Write: 10% 1534 30% 1534 50% 1534 90% 1534 99% 1534 ms Write: 10% 1743 30% 1743 50% 1743 90% 1743 99% 1743 ms Write: 10% 1430 30% 1430 50% 1430 90% 1430 99% 1430 ms Write: 10% 1324 30% 1324 50% 1324 90% 1324 99% 1324 ms Write: 10% 1613 30% 1613 50% 1613 90% 1613 99% 1613 ms Write: 10% 1336 30% 1336 50% 1336 90% 1336 99% 1336 ms Write: 10% 1640 30% 1640 50% 1640 90% 1640 99% 1640 ms Write: 10% 1354 30% 1354 50% 1354 90% 1354 99% 1354 ms Read: 10% 10968 30% 10968 50% 10968 90% 10968 99% 10968 ms Step 3. write modify Write: 10% 3056 30% 3056 50% 3056 90% 3056 99% 3056 ms Write: 10% 7795 30% 7795 50% 7795 90% 7795 99% 7795 ms Write: 10% 7900 30% 7900 50% 7900 90% 7900 99% 7900 ms Write: 10% 7833 30% 7833 50% 7833 90% 7833 99% 7833 ms Write: 10% 7674 30% 7674 50% 7674 90% 7674 99% 7674 ms Write: 10% 7553 30% 7553 50% 7553 90% 7553 99% 7553 ms Write: 10% 7212 30% 7212 50% 7212 90% 7212 99% 7212 ms Write: 10% 7301 30% 7301 50% 7301 90% 7301 99% 7301 ms Write: 10% 7134 30% 7134 50% 7134 90% 7134 99% 7134 ms Write: 10% 7189 30% 7189 50% 7189 90% 7189 99% 7189 ms Write: 10% 7142 30% 7142 50% 7142 90% 7142 99% 7142 ms Write: 10% 7070 30 ... 
5878 99% 5878 ms Write: 10% 5745 30% 5745 50% 5745 90% 5745 99% 5745 ms Write: 10% 5844 30% 5844 50% 5844 90% 5844 99% 5844 ms Write: 10% 5786 30% 5786 50% 5786 90% 5786 99% 5786 ms Write: 10% 6115 30% 6115 50% 6115 90% 6115 99% 6115 ms Write: 10% 5684 30% 5684 50% 5684 90% 5684 99% 5684 ms Write: 10% 5789 30% 5789 50% 5789 90% 5789 99% 5789 ms Write: 10% 4980 30% 4980 50% 4980 90% 4980 99% 4980 ms Write: 10% 5519 30% 5519 50% 5519 90% 5519 99% 5519 ms Write: 10% 4968 30% 4968 50% 4968 90% 4968 99% 4968 ms Write: 10% 5157 30% 5157 50% 5157 90% 5157 99% 5157 ms Write: 10% 5377 30% 5377 50% 5377 90% 5377 99% 5377 ms Write: 10% 4911 30% 4911 50% 4911 90% 4911 99% 4911 ms Write: 10% 5527 30% 5527 50% 5527 90% 5527 99% 5527 ms Write: 10% 4640 30% 4640 50% 4640 90% 4640 99% 4640 ms Write: 10% 4384 30% 4384 50% 4384 90% 4384 99% 4384 ms Write: 10% 4575 30% 4575 50% 4575 90% 4575 99% 4575 ms Write: 10% 4735 30% 4735 50% 4735 90% 4735 99% 4735 ms Write: 10% 4324 30% 4324 50% 4324 90% 4324 99% 4324 ms Write: 10% 4150 30% 4150 50% 4150 90% 4150 99% 4150 ms Write: 10% 4045 30% 4045 50% 4045 90% 4045 99% 4045 ms Write: 10% 4049 30% 4049 50% 4049 90% 4049 99% 4049 ms Write: 10% 3892 30% 3892 50% 3892 90% 3892 99% 3892 ms Write: 10% 3866 30% 3866 50% 3866 90% 3866 99% 3866 ms Write: 10% 3987 30% 3987 50% 3987 90% 3987 99% 3987 ms Write: 10% 3315 30% 3315 50% 3315 90% 3315 99% 3315 ms Write: 10% 3859 30% 3859 50% 3859 90% 3859 99% 3859 ms Write: 10% 3234 30% 3234 50% 3234 90% 3234 99% 3234 ms Write: 10% 3518 30% 3518 50% 3518 90% 3518 99% 3518 ms Write: 10% 3242 30% 3242 50% 3242 90% 3242 99% 3242 ms Write: 10% 3477 30% 3477 50% 3477 90% 3477 99% 3477 ms Write: 10% 3218 30% 3218 50% 3218 90% 3218 99% 3218 ms Write: 10% 3313 30% 3313 50% 3313 90% 3313 99% 3313 ms Write: 10% 3458 30% 3458 50% 3458 90% 3458 99% 3458 ms Write: 10% 3247 30% 3247 50% 3247 90% 3247 99% 3247 ms Write: 10% 3266 30% 3266 50% 3266 90% 3266 99% 3266 ms Write: 10% 3148 30% 3148 50% 3148 90% 3148 99% 3148 ms Update: 10% 645 30% 645 50% 645 90% 645 99% 645 ms Read: 10% 13702 30% 13702 50% 13702 90% 13702 99% 13702 ms Path read_update_write_load removed test_suffix, num 0, table path create_and_drop_tables start_time 1746416018.3150342 Path create_and_drop_tables removed File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File 
"contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "/home/runner/.ya/build/build_root/177e/0004e4/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 100, in test self._test_suffix(ctx, get_external_param("table_suffix", ""), exit_codes, 0) File "/home/runner/.ya/build/build_root/177e/0004e4/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 136, in _test_suffix ScenarioTestHelper(None).remove_path(ctx.test, ctx.suite) File "contrib/python/allure-python-commons/allure_commons/_allure.py", line 202, in impl return func(*a, **kw) File "ydb/tests/olap/scenario/helpers/scenario_tests_helper.py", line 726, in remove_path self._run_with_expected_status( File "ydb/tests/olap/scenario/helpers/scenario_tests_helper.py", line 343, in _run_with_expected_status result = operation() File "ydb/tests/olap/scenario/helpers/scenario_tests_helper.py", line 727, in lambda: YdbCluster.get_ydb_driver().scheme_client.remove_directory(os.path.join(root_path, e.name)), File "contrib/python/ydb/py3/ydb/scheme.py", line 489, in 
remove_directory return self._driver( File "contrib/python/ydb/py3/ydb/tracing.py", line 70, in wrapper return f(self, *args, **kwargs) File "contrib/python/ydb/py3/ydb/pool.py", line 443, in __call__ res = connection( File "contrib/python/ydb/py3/ydb/connection.py", line 458, in __call__ response = rpc_state( File "contrib/python/ydb/py3/ydb/connection.py", line 242, in __call__ response, rendezvous = self.rpc.with_call(*args, **kwargs) File "contrib/python/grpcio/py3/grpc/_channel.py", line 1041, in with_call state, call, = self._blocking(request, timeout, metadata, credentials, File "contrib/python/grpcio/py3/grpc/_channel.py", line 1017, in _blocking event = call.next_event() File "contrib/tools/python3/Lib/threading.py", line 299, in __enter__ def __enter__(self): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/177e/0004e4/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/177e/0004e4/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/177e/0004e4/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/177e/0004e4', '--source-root', '/home/runner/.ya/build/build_root/177e/0004e4/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/177e/0004e4/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/177e/0004e4/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/177e/0004e4/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', 
'/home/runner/.ya/build/build_root/177e/0004e4/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/177e/0004e4', '--source-root', '/home/runner/.ya/build/build_root/177e/0004e4/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/177e/0004e4/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) |99.9%| [TM] {RESULT} ydb/tests/olap/scenario/py3test |99.9%| CLEANING BUILD ROOT Number of suites skipped by size: 18 ydb/library/yaml_config/ut_transform [size:medium] ------ sole chunk ran 6 tests (total:12.61s - setup:0.05s test:11.73s canon:0.51s) [fail] test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [default-linux-x86_64-relwithdebinfo] (1.79s) Test results differ from canonical: test_result[3]: files content differs: 'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1 stdout: stderr: Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff/test_transform.py.TestYamlConfigTransformations.test_basic.args1-dump_ds_init.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff ------ FAIL: 5 - GOOD, 1 - FAIL ydb/library/yaml_config/ut_transform ydb/tests/functional/hive [size:medium] nchunks:80 ------ [test_drain.py 0/20] chunk ran 1 test (total:25.93s - test:25.67s) [fail] test_drain.py::TestHive::test_drain_on_stop [default-linux-x86_64-relwithdebinfo] (23.08s) ydb/tests/functional/hive/test_drain.py:93: in test_drain_on_stop wait_tablets_are_active( ydb/tests/library/common/delayed.py:151: in wait_tablets_are_active predicate(raise_error=True) ydb/tests/library/common/delayed.py:141: in predicate raise AssertionError( E AssertionError: E ############################## E 0 seconds passed, 214 tablet(s) are not active. Inactive tablets are (first 10 entries): (72075186224037889: 5) (72075186224037901: 5) (72075186224037902: 5) (72075186224037909: 5) (72075186224037932: 5) (72075186224037935: 5) (72075186224037944: 6) (72075186224037950: 5) (72075186224037955: 5) (72075186224037958: 5). 
Additional info is empty E ############################## Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff/test_drain.py.TestHive.test_drain_on_stop.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff ------ FAIL: 6 - GOOD, 1 - FAIL ydb/tests/functional/hive ydb/tests/functional/serverless [size:medium] nchunks:20 ------ [test_serverless.py 4/10] chunk ran 2 tests (total:231.63s - test:230.56s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (49.37s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. 
Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (176.32s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. 
E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ FAIL: 20 - GOOD, 2 - FAIL ydb/tests/functional/serverless ydb/tests/functional/sqs/cloud [size:medium] nchunks:40 ------ [19/40] chunk ran 2 tests (total:40.56s - test:40.47s) [fail] test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [default-linux-x86_64-relwithdebinfo] (17.15s) ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:466: in test_dlq_mechanics_in_cloud assert_that(len(self._read_single_message_no_wait(q1)), equal_to(0)) E AssertionError: E Expected: <0> E but: was <1> Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v1-tables_format_v0-std.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff ------ FAIL: 75 - GOOD, 1 - FAIL ydb/tests/functional/sqs/cloud ydb/tests/functional/tenants [size:medium] nchunks:20 ------ [10/20] chunk ran 5 tests (total:63.32s - setup:0.02s test:63.12s) [fail] test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (14.99s) ydb/tests/functional/tenants/test_tenants.py:433: in test_list_database_above assert result.children[0].name == ".sys" E AssertionError: assert '.metadata' == '.sys' E - .sys E + .metadata Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_list_database_above.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [11/20] chunk ran 5 tests (total:55.63s - setup:0.03s test:55.51s) [fail] test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (7.40s) ydb/tests/functional/tenants/test_tenants.py:433: in test_list_database_above assert result.children[0].name == ".sys" E AssertionError: assert '.metadata' == '.sys' E - .sys E + .metadata Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_list_database_above.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [12/20] chunk ran 5 tests (total:293.77s - setup:0.01s test:293.45s) [fail] test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (78.16s) 
ydb/tests/functional/tenants/test_tenants.py:297: in test_stop_start with ydb_database_ctx(ydb_cluster, database_path): contrib/tools/python3/Lib/contextlib.py:137: in __enter__ return next(self.gen) ydb/tests/library/fixtures/__init__.py:88: in ydb_database_ctx ydb_cluster.create_database(database_path, storage_pool_units_count=storage_pools, timeout_seconds=timeout_seconds, token=token) ydb/tests/library/harness/kikimr_cluster_interface.py:217: in create_database raise RuntimeError('create_database failed: %s, %s' % (operation.status, ydb.issues._format_issues(operation.issues))) E RuntimeError: create_database failed: 400080, message: "Group fit error BoxId# 1 StoragePoolId# 5 Error# failed to allocate group: no group options PDisks# {[(1:1-S)]}" severity: 1 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff [fail] test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (73.61s) ydb/tests/functional/tenants/test_tenants.py:297: in test_stop_start with ydb_database_ctx(ydb_cluster, database_path): contrib/tools/python3/Lib/contextlib.py:137: in __enter__ return next(self.gen) ydb/tests/library/fixtures/__init__.py:88: in ydb_database_ctx ydb_cluster.create_database(database_path, storage_pool_units_count=storage_pools, timeout_seconds=timeout_seconds, token=token) ydb/tests/library/harness/kikimr_cluster_interface.py:217: in create_database raise RuntimeError('create_database failed: %s, %s' % (operation.status, ydb.issues._format_issues(operation.issues))) E RuntimeError: create_database failed: 400080, message: "Group fit error BoxId# 1 StoragePoolId# 5 Error# failed to allocate group: no group options PDisks# {[(1:1-S)]}" severity: 1 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [2/20] chunk ran 6 tests (total:111.79s - setup:0.01s test:111.58s) [fail] test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (14.22s) ydb/tests/functional/tenants/test_dynamic_tenants.py:350: in test_create_and_drop_the_same_tenant2 pool.retry_operation_sync(write_some_data, None, "table", "table_for_rm", value) contrib/python/ydb/py3/ydb/table.py:2632: in retry_operation_sync return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/table.py:2630: in wrapped_callee return callee(session, *args, **kwargs) ydb/tests/functional/tenants/test_dynamic_tenants.py:340: in write_some_data session.transaction().execute( contrib/python/ydb/py3/ydb/table.py:2369: in execute return self._driver( contrib/python/ydb/py3/ydb/tracing.py:70: in wrapper return f(self, *args, **kwargs) 
contrib/python/ydb/py3/ydb/pool.py:443: in __call__ res = connection( contrib/python/ydb/py3/ydb/connection.py:465: in __call__ return response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.SchemeError: message: "Failed to create default pool in database /Root/users/database" severity: 1 issues { message: "Failed to create resource pool: StatusPathDoesNotExist" severity: 1 issues { message: "Path does not exist" issue_code: 200200 severity: 1 } } ,message: "Query failed during adding/waiting in workload pool " severity: 1 (server_code: 400070) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff [fail] test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (11.37s) ydb/tests/functional/tenants/test_dynamic_tenants.py:350: in test_create_and_drop_the_same_tenant2 pool.retry_operation_sync(write_some_data, None, "table", "table_for_rm", value) contrib/python/ydb/py3/ydb/table.py:2632: in retry_operation_sync return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/table.py:2630: in wrapped_callee return callee(session, *args, **kwargs) ydb/tests/functional/tenants/test_dynamic_tenants.py:340: in write_some_data session.transaction().execute( contrib/python/ydb/py3/ydb/table.py:2369: in execute return self._driver( contrib/python/ydb/py3/ydb/tracing.py:70: in wrapper return f(self, *args, **kwargs) contrib/python/ydb/py3/ydb/pool.py:443: in __call__ res = connection( contrib/python/ydb/py3/ydb/connection.py:465: in __call__ return response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), 
response_proto.issues) E ydb.issues.SchemeError: message: "Failed to create default pool in database /Root/users/database" severity: 1 issues { message: "Failed to create resource pool: StatusPathDoesNotExist" severity: 1 issues { message: "Path does not exist" issue_code: 200200 severity: 1 } } ,message: "Query failed during adding/waiting in workload pool " severity: 1 (server_code: 400070) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ FAIL: 101 - GOOD, 6 - FAIL, 2 - XFAIL ydb/tests/functional/tenants ydb/tests/olap/data_quotas [size:medium] nchunks:10 ------ [0/10] chunk ran 1 test (total:629.14s - setup:0.01s test:600.09s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: test_quota_exhaustion.py::TestYdbWorkload::test (timeout) duration: 626.99s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/stderr [timeout] test_quota_exhaustion.py::TestYdbWorkload::test [default-linux-x86_64-relwithdebinfo] (626.99s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/test_quota_exhaustion.py.TestYdbWorkload.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff ------ [2/10] chunk ran 1 test (total:220.69s - setup:0.01s test:220.52s) [fail] test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [default-linux-x86_64-relwithdebinfo] (218.13s) ydb/tests/olap/data_quotas/test_quota_exhaustion.py:236: in test_duplicates self.upsert_until_overload(lambda i: self.upsert_test_chunk(session, table_path, 0, retries=0), timeout_seconds=200) ydb/tests/olap/data_quotas/test_quota_exhaustion.py:83: in upsert_until_overload assert time.time() <= deadline, "deadline exceeded" E AssertionError: deadline exceeded E assert 1746415064.5239296 <= 1746415063.7419457 E + where 1746415064.5239296 = () E + where = time.time Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/test_quota_exhaustion.py.TestYdbWorkload.test_duplicates.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - GOOD, 1 - FAIL, 1 - TIMEOUT ydb/tests/olap/data_quotas ydb/tests/olap/scenario [size:medium] ------ sole chunk ran 18 tests (total:616.98s - setup:0.01s test:600.07s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: test_alter_compression.py::TestAlterCompression::test[alter_compression] (good) duration: 174.15s test_alter_tiering.py::TestAlterTiering::test[many_tables] (good) duration: 149.30s test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] (good) duration: 96.89s test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (good) duration: 74.64s 
test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] (good) duration: 57.57s test_simple.py::TestSimple::test[tablestores] (good) duration: 23.68s test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] (timeout) duration: 18.93s test_simple.py::TestSimple::test_multi[alter_table] (good) duration: 2.83s test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] (good) duration: 2.37s test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] (good) duration: 2.14s 8 more tests with 9.11s total duration are not listed. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [timeout] test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [default-linux-x86_64-relwithdebinfo] (18.93s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_scheme_load.py.TestSchemeLoad.test.create_and_drop_tables.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff ------ TIMEOUT: 17 - GOOD, 1 - TIMEOUT ydb/tests/olap/scenario ydb/tests/olap/ttl_tiering [size:medium] nchunks:10 ------ [1/10] chunk ran 1 test (total:615.01s - setup:0.01s test:600.06s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 612.40s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-relwithdebinfo] (612.40s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ [3/10] chunk ran 2 tests (total:459.08s - setup:0.01s test:458.94s) [fail] ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering [default-linux-x86_64-relwithdebinfo] (268.73s) ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:277: in test_delete_s3_tiering raise Exception(".sys reports incorrect data portions") E Exception: .sys reports incorrect data portions Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_delete_s3_tiering.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ TIMEOUT: 6 - GOOD, 1 - FAIL, 1 - TIMEOUT ydb/tests/olap/ttl_tiering ------ sole chunk ran 1 test (total:227.97s - test:227.22s) Info: Test run has exceeded 32.0G (33554432K) memory limit with 61.6G (64598924K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1708976 44.8M 44.5M 6.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1709355 32.7M 20.0M 7.5M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1709366 241M 241M 192M └─ ydb-tests-stress-olap_workload-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:f 1711422 6.9G 6.9G 6.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1711431 6.8G 6.7G 6.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1711432 6.8G 6.7G 6.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1711433 7.6G 7.5G 7.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1711434 6.8G 6.7G 6.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1711435 6.8G 6.7G 6.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1711444 7.0G 6.8G 6.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1711445 6.7G 6.6G 6.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1711446 6.7G 6.7G 6.5G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/stderr ------ sole chunk ran 2 tests (total:287.19s - setup:0.02s test:284.92s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 9.3G (9701744K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1594886 44.8M 44.3M 6.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1595227 32.8M 20.6M 8.2M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1595283 981M 960M 930M └─ ydb-tests-stress-simple_queue-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:fa 1598072 951M 945M 698M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1598078 1004M 963M 766M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1598081 893M 882M 669M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1598098 979M 956M 725M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1598103 911M 881M 684M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1598105 970M 939M 733M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1598111 1.0G 1.0G 810M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1598116 968M 933M 742M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1598122 959M 921M 734M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_out_stuff/stderr Total 448 suites: 440 - GOOD 5 - FAIL 3 - TIMEOUT Total 13741 tests: 11094 - GOOD 13 - FAIL 2 - XFAIL 3 - TIMEOUT 2629 - SKIPPED Cache efficiency ratio is 95.08% (45204 of 47543). 
Local: 0 (0.00%), dist: 3060 (6.44%), by dynamic uids: 0 (0.00%), avoided: 42144 (88.64%) Dist cache download: count=2869, size=10.18 GiB, speed=151.58 MiB/s Disk usage for tools/sdk at least 155.22 MiB Additional disk space consumed for build cache 59.58 GiB Critical path: [ 618 ms] [CF] [20f9C3Iev1TOpW73h2_hFg default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/library/cpp/build_info/sandbox.cpp [started: 0 (1746414188356), finished: 618 (1746414188974)] [ 1104 ms] [CC] [GqENgENKCpXvzmkkepKrNw default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/library/cpp/build_info/sandbox.cpp [started: 36999 (1746414225355), finished: 38103 (1746414226459)] [ 53 ms] [AR] [I_WBwUTtgQjTzWqSlSKJPg default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/library/cpp/build_info/liblibrary-cpp-build_info.a [started: 55201 (1746414243557), finished: 55254 (1746414243610)] [ 15272 ms] [LD] [_YquJ-MZYwmNfAa4f-xR2g default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/ydb/apps/ydbd/ydbd [started: 63736 (1746414252092), finished: 79008 (1746414267364)] [615532 ms] [TM] [rnd-14730632986262778215 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 868827 (1746415057183), finished: 1484359 (1746415672715)] [ 33761 ms] [TA] [rnd-nbfaqlmesftft62z]: $(BUILD_ROOT)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} [started: 1484381 (1746415672737), finished: 1518142 (1746415706498)] Time from start: 1873108.5869140625 ms, time elapsed by graph 666340 ms, time diff 1206768.5869140625 ms. The longest 10 tasks: [629586 ms] [TM] [rnd-16930594527855627867 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/data_quotas/py3test [started: 1746414349350, finished: 1746414978936] [617419 ms] [TM] [rnd-6n3gfur4uzvugmqf default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/scenario/py3test [started: 1746415422078, finished: 1746416039497] [615532 ms] [TM] [rnd-14730632986262778215 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746415057183, finished: 1746415672715] [513217 ms] [TM] [rnd-ajd5d7wr5i6vmbbx default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/column_family/compression/py3test [started: 1746415525048, finished: 1746416038265] [505897 ms] [TM] [rnd-3184947245417719684 default-linux-x86_64 relwithdebinfo]: ydb/tests/functional/ydb_cli/py3test [started: 1746415259387, finished: 1746415765284] [483890 ms] [TM] [rnd-xx4q6h959ygyvf9g default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/py3test [started: 1746415496797, finished: 1746415980687] [459612 ms] [TM] [rnd-12976608097337672877 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746415061092, finished: 1746415520704] [446721 ms] [TM] [rnd-7422851683531438418 default-linux-x86_64 relwithdebinfo]: ydb/tests/fq/s3/py3test [started: 1746415292687, finished: 1746415739408] [423160 ms] [TM] [rnd-12395960163156958667 default-linux-x86_64 relwithdebinfo]: ydb/tests/functional/serverless/py3test [started: 1746415263607, finished: 1746415686767] [411891 ms] [TM] [rnd-6654934400658239179 default-linux-x86_64 relwithdebinfo]: ydb/tests/functional/serverless/py3test [started: 1746415240839, finished: 1746415652730] Total time by type: [71440829 ms] [TM] [count: 1741, ave time 41034.36 msec] [ 7371969 ms] [prepare:get from dist cache] [count: 3060, ave time 2409.14 msec] [ 4819116 ms] [prepare:put to dist cache] [count: 188, ave time 25633.60 msec] [ 1838623 ms] [TS] [count: 305, ave time 6028.27 msec] [ 440920 ms] [LD] [count: 163, ave time 2705.03 
msec] [ 413725 ms] [prepare:tools] [count: 20, ave time 20686.25 msec] [ 403600 ms] [TA] [count: 82, ave time 4921.95 msec] [ 258058 ms] [prepare:bazel-store] [count: 3, ave time 86019.33 msec] [ 73011 ms] [prepare:put into local cache, clean build dir] [count: 3079, ave time 23.71 msec] [ 59675 ms] [prepare:AC] [count: 4, ave time 14918.75 msec] [ 26510 ms] [PY] [count: 15, ave time 1767.33 msec] [ 5773 ms] [CC] [count: 7, ave time 824.71 msec] [ 5446 ms] [prepare:resources] [count: 2, ave time 2723.00 msec] [ 2223 ms] [AR] [count: 7, ave time 317.57 msec] [ 1958 ms] [UN] [count: 3, ave time 652.67 msec] [ 1424 ms] [SB] [count: 2, ave time 712.00 msec] [ 1264 ms] [PK] [count: 2, ave time 632.00 msec] [ 1199 ms] [CF] [count: 2, ave time 599.50 msec] [ 1134 ms] [ld] [count: 2, ave time 567.00 msec] [ 935 ms] [BN] [count: 6, ave time 155.83 msec] [ 608 ms] [BI] [count: 1, ave time 608.00 msec] [ 402 ms] [CP] [count: 1, ave time 402.00 msec] [ 11 ms] [prepare:clean] [count: 3, ave time 3.67 msec] Total tasks times: Total failed tasks time - 0 ms (0.00%) Total tests tasks time - 73683052 ms (99.35%) Total run tasks time - 74167402 ms Configure time - 27.1 s Statistics overhead 1261 ms Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json Ok + echo 0 + ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build relwithdebinfo -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.jvIwrReDhY --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -X --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out Output root is subdirectory of Arcadia root, this may cause non-idempotent build Configuring dependencies for platform default-linux-x86_64-relwithdebinfo Configuring dependencies for platform tools Configuring dependencies for platform test_tool_tc1-global Configuring tests execution Configuring local and dist store caches Configuration done. 
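At this point the first attempt is complete: its JUnit and JSON reports have been written under results/try_1, and the same ya make invocation is re-run with --retest into try_2. Purely as a reading aid, the failed cases of the first attempt can be pulled out of that junit.xml with the standard library; the sketch below is an illustration, not part of the run itself, and it assumes nothing ya-specific — only the usual JUnit layout of <testsuite>/<testcase> elements with <failure> or <error> children, with the path taken from the log above.

import xml.etree.ElementTree as ET

# Path copied verbatim from the log above (try_1 report of this run).
JUNIT_PATH = "/home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml"

def failed_cases(path):
    # Yield "classname::name" for every testcase carrying a <failure> or <error> child.
    root = ET.parse(path).getroot()
    for case in root.iter("testcase"):
        if case.find("failure") is not None or case.find("error") is not None:
            yield f"{case.get('classname', '')}::{case.get('name', '')}"

if __name__ == "__main__":
    for name in failed_cases(JUNIT_PATH):
        print(name)

The retry itself is driven by ya's own --retest machinery; this script only summarizes the report that was dumped above.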
Preparing for execution |33.3%| CLEANING SYMRES | 1.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export | 2.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/tools/yqlrun/yqlrun | 3.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/tool | 5.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut | 7.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load | 8.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut | 8.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |11.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |12.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |12.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |13.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |14.3%| PREPARE $(LLD_ROOT-3808007503) |15.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut |17.2%| PREPARE $(CLANG-1922233694) |19.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |21.2%| [LD] {RESULT} $(B)/ydb/tests/stability/tool/tool |21.7%| PREPARE $(FLAKE8_PY3-715603131) |23.2%| PREPARE $(CLANG-874354456) |24.1%| PREPARE $(CLANG16-1380963495) |24.6%| PREPARE $(PYTHON) |25.1%| PREPARE $(YMAKE_PYTHON3-4256832079) |27.1%| PREPARE $(GDB) |28.1%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |28.6%| PREPARE $(TEST_TOOL_HOST-sbr:8580453620) |29.1%| PREPARE $(CLANG_FORMAT-1286082657) |30.0%| PREPARE $(CLANG18-1866954364) |30.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |31.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |32.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |33.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |33.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |34.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |35.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |35.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |35.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |35.9%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |36.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |36.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |37.4%| [AR] {RESULT} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |37.9%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |34.0%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |34.5%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |35.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |35.7%| COMPACTING CACHE 59.6GiB |36.2%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun |36.6%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut |37.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |37.4%| [LD] 
{RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |37.9%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |38.3%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |38.7%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |39.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |39.6%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |40.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |40.4%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |40.9%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |41.3%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |41.7%| [LD] {RESULT} $(B)/yql/tools/yqlrun/yqlrun |42.1%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |42.6%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |43.0%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |43.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |43.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] |45.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |45.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] |46.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |46.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |46.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |47.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] |47.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |48.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |48.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |48.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |49.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] |49.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |50.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |50.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] |51.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |51.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |51.9%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/sqs/cloud/py3test |52.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |52.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |53.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |53.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |54.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |54.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |54.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |55.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |55.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |56.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |56.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |57.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |58.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |59.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |60.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |60.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |61.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |61.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |63.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |63.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |64.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates >> test_quota_exhaustion.py::TestYdbWorkload::test_delete |65.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |65.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> tier_delete.py::TestTierDelete::test_delete_s3_ttl |66.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |66.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> ttl_unavailable_s3.py::TestUnavailableS3::test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/olap/data_quotas/py3test |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |68.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |68.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |69.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |69.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |70.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |70.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |71.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> unstable_connection.py::TestUnstableConnection::test |71.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |71.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |72.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |72.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |73.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete |73.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] |74.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |74.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change >> data_correctness.py::TestDataCorrectness::test |75.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test |76.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |77.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] |78.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |81.3%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/hive/py3test >> test_workload.py::TestYdbWorkload::test >> test_workload.py::TestYdbWorkload::test[row] |81.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] [FAIL] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [FAIL] >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/py3test >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] 2025-05-05 03:47:32,283 ERROR devtools.ya.test.canon.compare: Cannot calculate diff: Traceback (most recent call last): File "devtools/ya/test/canon/compare.py", line 402, in _get_file_diff_via_diff raise Exception( Exception: 'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1 stdout: stderr: |83.8%| [TM] {RESULT} ydb/library/yaml_config/ut_transform/py3test >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] [FAIL] >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] [FAIL] |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] [FAIL] |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] [FAIL] |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [FAIL] >> test_drain.py::TestHive::test_drain_on_stop [FAIL] |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] [FAIL] |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] |86.4%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |86.8%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |87.7%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} |88.1%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] [FAIL] |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] [FAIL] >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] [FAIL] |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] [FAIL] |89.4%| [TA] $(B)/ydb/tests/functional/tenants/test-results/py3test/{meta.json ... results_accumulator.log} |89.8%| [TA] {RESULT} $(B)/ydb/tests/functional/tenants/test-results/py3test/{meta.json ... results_accumulator.log} >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/4zc4/00004e/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/4zc4/00004e/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback !!! simulating S3 hang up -- sending SIGSTOP !!! 
simulating S3 recovery -- sending SIGCONT contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1766572 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [GOOD] >> data_correctness.py::TestDataCorrectness::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] Test command err: contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead. >> test_workload.py::TestYdbWorkload::test[row] [GOOD] >> test_workload.py::TestYdbWorkload::test[column] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_correctness.py::TestDataCorrectness::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/4zc4/000052/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/4zc4/000052/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1771525 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [GOOD] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] Test command err: contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead. |91.9%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} >> unstable_connection.py::TestUnstableConnection::test [GOOD] |92.3%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_quota_exhaustion.py::TestYdbWorkload::test_delete [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_delete [GOOD] Test command err: Database name /Root/test upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert #19 ok, result: [] Quota exceeded False upsert #20 ok, result: [] Quota exceeded False upsert #21 ok, result: [] Quota exceeded False upsert #22 ok, result: [] Quota exceeded False upsert: got overload issue delete #0 ok delete #1 ok delete #2 ok delete #3 ok delete #4 ok delete #5 ok delete #6 ok delete #7 ok delete #8 ok delete #9 ok delete #10 ok delete #11 ok delete #12 ok delete #13 ok delete #14 ok delete #15 ok delete #16 ok delete #17 ok delete #18 ok delete #19 ok delete #20 ok delete #21 ok delete #22 ok delete #23 ok delete #24 ok delete #25 ok delete #26 ok delete #27 ok delete #28 ok delete #29 ok delete #30 ok delete #31 ok delete #32 ok delete #33 ok delete #34 ok delete #35 ok delete #36 ok delete #37 ok delete #38 ok delete #39 ok delete #40 ok delete #41 ok delete #42 ok delete #43 ok delete #44 ok delete #45 ok delete #46 ok delete #47 ok delete #48 ok delete #49 ok delete #50 ok delete #51 ok delete #52 ok delete #53 ok delete #54 ok delete #55 ok delete #56 ok delete #57 ok delete #58 ok delete #59 ok delete #60 ok delete #61 ok delete #62 ok delete #63 ok delete #64 ok delete #65 ok delete #66 ok delete #67 ok delete #68 ok delete #69 ok delete #70 ok delete #71 ok delete #72 ok delete #73 ok delete #74 ok delete #75 ok delete #76 ok delete #77 ok delete #78 ok delete #79 ok delete #80 ok delete #81 ok delete #82 ok delete #83 ok delete #84 ok delete #85 ok delete #86 ok delete #87 ok delete #88 ok delete #89 ok delete #90 ok delete #91 ok delete #92 ok delete #93 ok delete #94 ok delete #95 ok delete #96 ok delete #97 ok delete #98 ok delete #99 ok delete #100 ok delete #101 ok delete #102 ok delete #103 ok delete #104 ok delete #105 ok delete #106 ok delete #107 ok delete #108 ok delete #109 ok delete #110 ok delete #111 ok delete #112 ok delete #113 ok delete #114 ok delete #115 ok delete #116 ok delete #117 ok delete #118 ok delete #119 ok delete #120 ok delete #121 ok delete #122 ok delete #123 ok delete #124 ok delete #125 ok delete #126 ok delete #127 ok delete #128 ok delete #129 ok delete #130 ok delete #131 ok delete #132 ok delete #133 ok delete #134 ok delete #135 ok delete #136 ok delete #137 ok delete #138 ok delete #139 ok delete #140 ok delete #141 ok delete #142 ok delete #143 ok delete #144 ok delete #145 ok delete #146 ok delete #147 ok delete #148 ok delete #149 ok delete #150 ok delete #151 ok 
delete #152 ok delete #153 ok delete #154 ok delete #155 ok delete #156 ok delete #157 ok delete #158 ok delete #159 ok delete #160 ok delete #161 ok delete #162 ok delete #163 ok delete #164 ok delete #165 ok delete #166 ok delete #167 ok delete #168 ok delete #169 ok delete #170 ok delete #171 ok delete #172 ok delete #173 ok delete #174 ok delete #175 ok delete #176 ok delete #177 ok delete #178 ok delete #179 ok delete #180 ok delete #181 ok delete #182 ok delete #183 ok delete #184 ok delete #185 ok delete #186 ok delete #187 ok delete #188 ok delete #189 ok delete #190 ok delete #191 ok delete #192 ok delete #193 ok delete #194 ok delete #195 ok delete #196 ok delete #197 ok delete #198 ok delete #199 ok delete #200 ok delete #201 ok delete #202 ok delete #203 ok delete #204 ok delete #205 ok delete #206 ok delete #207 ok delete #208 ok delete #209 ok delete #210 ok delete #211 ok delete #212 ok delete #213 ok delete #214 ok delete #215 ok delete #216 ok delete #217 ok delete #218 ok delete #219 ok delete #220 ok delete #221 ok delete #222 ok delete #223 ok delete #224 ok delete #225 ok delete #226 ok delete #227 ok delete #228 ok delete #229 ok delete #230 ok delete #231 ok delete #232 ok delete #233 ok delete #234 ok delete #235 ok delete #236 ok delete #237 ok delete #238 ok delete #239 ok delete #240 ok delete #241 ok delete #242 ok delete #243 ok delete #244 ok delete #245 ok delete #246 ok delete #247 ok delete #248 ok delete #249 ok delete #250 ok delete #251 ok delete #252 ok delete #253 ok delete #254 ok delete #255 ok delete #256 ok delete #257 ok delete #258 ok delete #259 ok delete #260 ok delete #261 ok delete #262 ok delete #263 ok delete #264 ok delete #265 ok delete #266 ok delete #267 ok delete #268 ok delete #269 ok delete #270 ok delete #271 ok delete #272 ok delete #273 ok delete #274 ok delete #275 ok delete #276 ok delete #277 ok delete #278 ok delete #279 ok delete #280 ok delete #281 ok delete #282 ok delete #283 ok delete #284 ok delete #285 ok delete #286 ok delete #287 ok delete #288 ok delete #289 ok delete #290 ok delete #291 ok delete #292 ok delete #293 ok delete #294 ok delete #295 ok delete #296 ok delete #297 ok delete #298 ok delete #299 ok delete #300 ok delete #301 ok delete #302 ok delete #303 ok delete #304 ok delete #305 ok delete #306 ok delete #307 ok delete #308 ok delete #309 ok delete #310 ok delete #311 ok delete #312 ok delete #313 ok delete #314 ok delete #315 ok delete #316 ok delete #317 ok delete #318 ok delete #319 ok delete #320 ok delete #321 ok delete #322 ok delete #323 ok delete #324 ok delete #325 ok delete #326 ok delete #327 ok delete #328 ok delete #329 ok delete #330 ok delete #331 ok delete #332 ok delete #333 ok delete #334 ok delete #335 ok delete #336 ok delete #337 ok delete #338 ok delete #339 ok delete #340 ok delete #341 ok delete #342 ok delete #343 ok delete #344 ok delete #345 ok delete #346 ok delete #347 ok delete #348 ok delete #349 ok delete #350 ok delete #351 ok delete #352 ok delete #353 ok delete #354 ok delete #355 ok delete #356 ok delete #357 ok delete #358 ok delete #359 ok delete #360 ok delete #361 ok delete #362 ok delete #363 ok delete #364 ok delete #365 ok delete #366 ok delete #367 ok delete #368 ok delete #369 ok delete #370 ok delete #371 ok delete #372 ok delete #373 ok delete #374 ok delete #375 ok delete #376 ok delete #377 ok delete #378 ok delete #379 ok delete #380 ok delete #381 ok delete #382 ok delete #383 ok delete #384 ok delete #385 ok delete #386 ok delete #387 ok delete #388 ok 
delete #389 ok delete #390 ok delete #391 ok delete #392 ok delete #393 ok delete #394 ok delete #395 ok delete #396 ok delete #397 ok delete #398 ok delete #399 ok delete #400 ok delete #401 ok delete #402 ok delete #403 ok delete #404 ok delete #405 ok delete #406 ok delete #407 ok delete #408 ok delete #409 ok delete #410 ok delete #411 ok delete #412 ok delete #413 ok delete #414 ok delete #415 ok delete #416 ok delete #417 ok delete #418 ok delete #419 ok delete #420 ok delete #421 ok delete #422 ok delete #423 ok delete #424 ok delete #425 ok delete #426 ok delete #427 ok delete #428 ok delete #429 ok delete #430 ok delete #431 ok delete #432 ok delete #433 ok delete #434 ok delete #435 ok delete #436 ok delete #437 ok delete #438 ok delete #439 ok delete #440 ok delete #441 ok delete #442 ok delete #443 ok delete #444 ok delete #445 ok delete #446 ok delete #447 ok delete #448 ok delete #449 ok delete #450 ok delete #451 ok delete #452 ok delete #453 ok delete #454 ok delete #455 ok delete #456 ok delete #457 ok delete #458 ok delete #459 ok delete #460 ok delete #461 ok delete #462 ok delete #463 ok delete #464 ok delete #465 ok delete #466 ok delete #467 ok delete #468 ok delete #469 ok delete #470 ok delete #471 ok delete #472 ok delete #473 ok delete #474 ok delete #475 ok delete #476 ok delete #477 ok delete #478 ok delete #479 ok delete #480 ok delete #481 ok delete #482 ok delete #483 ok delete #484 ok delete #485 ok delete #486 ok delete #487 ok delete #488 ok delete #489 ok delete #490 ok delete #491 ok delete #492 ok delete #493 ok delete #494 ok delete #495 ok delete #496 ok delete #497 ok delete #498 ok delete #499 ok ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/4zc4/000050/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/4zc4/000050/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1771063 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> unstable_connection.py::TestUnstableConnection::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/4zc4/000055/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper 
name='/home/runner/.ya/build/build_root/4zc4/000055/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1770608 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/4zc4/000054/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/4zc4/000054/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {'__DEFAULT': 100000}, portions: 2 contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1766424 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [FAIL] >> test_workload.py::TestYdbWorkload::test [GOOD] ------- 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [FAIL] Test command err: Database name /Root/test upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert #19 ok, result: [] Quota exceeded False upsert #20 ok, result: [] Quota exceeded False upsert #21 ok, result: [] Quota exceeded False upsert #22 ok, result: [] Quota exceeded False upsert #23 ok, result: [] Quota exceeded False upsert #24 ok, result: [] Quota exceeded False upsert #25 ok, result: [] Quota exceeded False upsert #26 ok, result: [] Quota exceeded False upsert #27 ok, result: [] Quota exceeded False upsert #28 ok, result: [] Quota exceeded False upsert #29 ok, result: [] Quota exceeded False upsert #30 ok, result: [] Quota exceeded False upsert #31 ok, result: [] Quota exceeded False upsert #32 ok, result: [] Quota exceeded False upsert #33 ok, result: [] Quota exceeded False upsert #34 ok, result: [] Quota exceeded False upsert #35 ok, result: [] Quota exceeded False upsert #36 ok, result: [] Quota exceeded False upsert #37 ok, result: [] Quota exceeded False upsert #38 ok, result: [] Quota exceeded False upsert #39 ok, result: [] Quota exceeded False upsert #40 ok, result: [] Quota exceeded False upsert #41 ok, result: [] Quota exceeded False upsert #42 ok, result: [] Quota exceeded False upsert #43 ok, result: [] Quota exceeded False upsert #44 ok, result: [] Quota exceeded False upsert #45 ok, result: [] Quota exceeded False upsert #46 ok, result: [] Quota exceeded False upsert #47 ok, result: [] Quota exceeded False upsert #48 ok, result: [] Quota exceeded False upsert #49 ok, result: [] Quota exceeded False upsert #50 ok, result: [] Quota exceeded False upsert #51 ok, result: [] Quota exceeded False upsert #52 ok, result: [] Quota exceeded False upsert #53 ok, result: [] Quota exceeded False upsert #54 ok, result: [] Quota exceeded False upsert #55 ok, result: [] Quota exceeded False upsert #56 ok, result: [] Quota exceeded False upsert #57 ok, result: [] Quota exceeded False upsert #58 ok, result: [] Quota exceeded False upsert #59 ok, result: [] Quota exceeded False upsert #60 ok, result: [] Quota exceeded False upsert #61 ok, result: [] Quota exceeded False upsert #62 ok, result: [] Quota exceeded False upsert #63 ok, result: [] Quota exceeded False upsert #64 ok, result: [] Quota exceeded False upsert #65 ok, result: [] Quota exceeded False upsert #66 ok, result: [] Quota exceeded False upsert #67 ok, result: [] Quota exceeded False upsert #68 ok, result: [] Quota exceeded False upsert #69 ok, result: [] Quota exceeded False upsert #70 ok, result: [] Quota exceeded False upsert #71 ok, result: [] Quota 
exceeded False upsert #72 ok, result: [] Quota exceeded False upsert #73 ok, result: [] Quota exceeded False upsert #74 ok, result: [] Quota exceeded False upsert #75 ok, result: [] Quota exceeded False upsert #76 ok, result: [] Quota exceeded False upsert #77 ok, result: [] Quota exceeded False upsert #78 ok, result: [] Quota exceeded False upsert #79 ok, result: [] Quota exceeded False upsert #80 ok, result: [] Quota exceeded False upsert #81 ok, result: [] Quota exceeded False upsert #82 ok, result: [] Quota exceeded False upsert #83 ok, result: [] Quota exceeded False upsert #84 ok, result: [] Quota exceeded False upsert #85 ok, result: [] Quota exceeded False upsert #86 ok, result: [] Quota exceeded False upsert #87 ok, result: [] Quota exceeded False upsert #88 ok, result: [] Quota exceeded False upsert #89 ok, result: [] Quota exceeded False upsert #90 ok, result: [] Quota exceeded False upsert #91 ok, result: [] Quota exceeded False upsert #92 ok, result: [] Quota exceeded False upsert #93 ok, result: [] Quota exceeded False upsert #94 ok, result: [] Quota exceeded False upsert #95 ok, result: [] Quota exceeded False upsert #96 ok, result: [] Quota exceeded False upsert #97 ok, result: [] Quota exceeded False upsert #98 ok, result: [] Quota exceeded False >> test_workload.py::TestYdbWorkload::test[column] [GOOD] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |95.3%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [GOOD] |95.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/py3test >> test_workload.py::TestYdbWorkload::test[column] [GOOD] |96.2%| [TM] {RESULT} ydb/tests/stress/simple_queue/tests/py3test >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [GOOD] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/4zc4/00004d/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/4zc4/00004d/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1771539 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD] >> 
test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD] >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] >> test_quota_exhaustion.py::TestYdbWorkload::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test [GOOD] Test command err: upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert #19 ok, result: [] Quota exceeded False upsert #20 ok, result: [] Quota exceeded False upsert #21 ok, result: [] Quota exceeded False upsert #22 ok, result: [] Quota exceeded False upsert #23 ok, result: [] Quota exceeded False upsert #24 ok, result: [] Quota exceeded False upsert #25 ok, result: [] Quota exceeded False upsert #26 ok, result: [] Quota exceeded False upsert #27 ok, result: [] Quota exceeded False upsert #28 ok, result: [] Quota exceeded False upsert #29 ok, result: [] Quota exceeded False upsert #30 ok, result: [] Quota exceeded False upsert #31 ok, result: [] Quota exceeded False upsert #32 ok, result: [] Quota exceeded False upsert #33 ok, result: [] Quota exceeded False upsert #34 ok, result: [] Quota exceeded False upsert #35 ok, result: [] Quota exceeded False upsert #36 ok, result: [] Quota exceeded False upsert #37 ok, result: [] Quota exceeded False upsert #38 ok, result: [] Quota exceeded False upsert #39 ok, result: [] Quota exceeded False upsert #40 ok, result: [] Quota exceeded False upsert #41 ok, result: [] Quota exceeded False upsert #42 ok, result: [] Quota exceeded False upsert #43 ok, result: [] Quota exceeded False upsert #44 ok, result: [] Quota exceeded False upsert #45 ok, result: [] Quota exceeded False upsert #46 ok, result: [] Quota exceeded False upsert #47 ok, result: [] Quota exceeded False upsert #48 ok, result: [] Quota exceeded False upsert #49 ok, result: [] Quota exceeded False upsert #50 ok, result: [] Quota exceeded False upsert #51 ok, result: [] Quota exceeded False upsert #52 
ok, result: [] Quota exceeded False upsert #53 ok, result: [] Quota exceeded False upsert #54 ok, result: [] Quota exceeded False upsert #55 ok, result: [] Quota exceeded False upsert #56 ok, result: [] Quota exceeded False upsert #57 ok, result: [] Quota exceeded False upsert #58 ok, result: [] Quota exceeded False upsert #59 ok, result: [] Quota exceeded False upsert #60 ok, result: [] Quota exceeded False upsert #61 ok, result: [] Quota exceeded False upsert #62 ok, result: [] Quota exceeded False upsert #63 ok, result: [] Quota exceeded False upsert #64 ok, result: [] Quota exceeded False upsert #65 ok, result: [] Quota exceeded False upsert #66 ok, result: [] Quota exceeded False upsert #67 ok, result: [] Quota exceeded False upsert #68 ok, result: [] Quota exceeded False upsert #69 ok, result: [] Quota exceeded False upsert #70 ok, result: [] Quota exceeded False upsert #71 ok, result: [] Quota exceeded False upsert #72 ok, result: [] Quota exceeded False upsert #73 ok, result: [] Quota exceeded False upsert #74 ok, result: [] Quota exceeded False upsert #75 ok, result: [] Quota exceeded False upsert #76 ok, result: [] Quota exceeded False upsert #77 ok, result: [] Quota exceeded False upsert #78 ok, result: [] Quota exceeded False upsert #79 ok, result: [] Quota exceeded False upsert #80 ok, result: [] Quota exceeded False upsert #81 ok, result: [] Quota exceeded False upsert #82 ok, result: [] Quota exceeded False upsert #83 ok, result: [] Quota exceeded False upsert #84 ok, result: [] Quota exceeded False upsert #85 ok, result: [] Quota exceeded False upsert #86 ok, result: [] Quota exceeded False upsert #87 ok, result: [] Quota exceeded False upsert #88 ok, result: [] Quota exceeded False upsert #89 ok, result: [] Quota exceeded False upsert #90 ok, result: [] Quota exceeded False upsert #91 ok, result: [] Quota exceeded False upsert #92 ok, result: [] Quota exceeded False upsert #93 ok, result: [] Quota exceeded False upsert #94 ok, result: [] Quota exceeded False upsert #95 ok, result: [] Quota exceeded False upsert #96 ok, result: [] Quota exceeded False upsert #97 ok, result: [] Quota exceeded False upsert #98 ok, result: [] Quota exceeded False upsert #99 ok, result: [] Quota exceeded False upsert #100 ok, result: [] Quota exceeded False upsert #101 ok, result: [] Quota exceeded False upsert #102 ok, result: [] Quota exceeded False upsert #103 ok, result: [] Quota exceeded False upsert #104 ok, result: [] Quota exceeded False upsert #105 ok, result: [] Quota exceeded False upsert #106 ok, result: [] Quota exceeded False upsert #107 ok, result: [] Quota exceeded False upsert #108 ok, result: [] Quota exceeded False upsert #109 ok, result: [] Quota exceeded False upsert #110 ok, result: [] Quota exceeded False upsert #111 ok, result: [] Quota exceeded False upsert #112 ok, result: [] Quota exceeded False upsert #113 ok, result: [] Quota exceeded False upsert #114 ok, result: [] Quota exceeded False upsert #115 ok, result: [] Quota exceeded False upsert #116 ok, result: [] Quota exceeded False upsert #117 ok, result: [] Quota exceeded False upsert #118 ok, result: [] Quota exceeded False upsert #119 ok, result: [] Quota exceeded False upsert #120 ok, result: [] Quota exceeded False upsert #121 ok, result: [] Quota exceeded False upsert #122 ok, result: [] Quota exceeded False upsert #123 ok, result: [] Quota exceeded False upsert #124 ok, result: [] Quota exceeded False upsert #125 ok, result: [] Quota exceeded False upsert #126 ok, result: [] Quota exceeded False upsert #127 
ok, result: [] Quota exceeded False upsert #128 ok, result: [] Quota exceeded False upsert #129 ok, result: [] Quota exceeded False upsert #130 ok, result: [] Quota exceeded False upsert #131 ok, result: [] Quota exceeded False upsert #132 ok, result: [] Quota exceeded False upsert #133 ok, result: [] Quota exceeded False upsert #134 ok, result: [] Quota exceeded False upsert #135 ok, result: [] Quota exceeded False upsert #136 ok, result: [] Quota exceeded False upsert #137 ok, result: [] Quota exceeded False upsert #138 ok, result: [] Quota exceeded False upsert #139 ok, result: [] Quota exceeded False upsert #140 ok, result: [] Quota exceeded False upsert #141 ok, result: [] Quota exceeded False upsert #142 ok, result: [] Quota exceeded False upsert #143 ok, result: [] Quota exceeded False upsert #144 ok, result: [] Quota exceeded False upsert #145 ok, result: [] Quota exceeded False upsert #146 ok, result: [] Quota exceeded False upsert #147 ok, result: [] Quota exceeded False upsert #148 ok, result: [] Quota exceeded False upsert #149 ok, result: [] Quota exceeded False upsert #150 ok, result: [] Quota exceeded False upsert #151 ok, result: [] Quota exceeded False upsert #152 ok, result: [] Quota exceeded False upsert #153 ok, result: [] Quota exceeded False upsert #154 ok, result: [] Quota exceeded False upsert #155 ok, result: [] Quota exceeded False upsert #156 ok, result: [] Quota exceeded False upsert #157 ok, result: [] Quota exceeded False upsert #158 ok, result: [] Quota exceeded False upsert #159 ok, result: [] Quota exceeded False upsert #160 ok, result: [] Quota exceeded False upsert #161 ok, result: [] Quota exceeded False upsert #162 ok, result: [] Quota exceeded False upsert #163 ok, result: [] Quota exceeded False upsert #164 ok, result: [] Quota exceeded False upsert #165 ok, result: [] Quota exceeded False upsert #166 ok, result: [] Quota exceeded False upsert #167 ok, result: [] Quota exceeded False upsert #168 ok, result: [] Quota exceeded False upsert #169 ok, result: [] Quota exceeded False upsert #170 ok, result: [] Quota exceeded False upsert #171 ok, result: [] Quota exceeded False upsert #172 ok, result: [] Quota exceeded False upsert #173 ok, result: [] Quota exceeded False upsert #174 ok, result: [] Quota exceeded False upsert #175 ok, result: [] Quota exceeded False upsert #176 ok, result: [] Quota exceeded False upsert #177 ok, result: [] Quota exceeded False upsert #178 ok, result: [] Quota exceeded False upsert #179 ok, result: [] Quota exceeded False upsert #180 ok, result: [] Quota exceeded False upsert #181 ok, result: [] Quota exceeded False upsert #182 ok, result: [] Quota exceeded False upsert #183 ok, result: [] Quota exceeded False upsert #184 ok, result: [] Quota exceeded False upsert #185 ok, result: [] Quota exceeded False upsert #186 ok, result: [] Quota exceeded False upsert #187 ok, result: [] Quota exceeded False upsert #188 ok, result: [] Quota exceeded False upsert #189 ok, result: [] Quota exceeded False upsert #190 ok, result: [] Quota exceeded False upsert #191 ok, result: [] Quota exceeded False upsert #192 ok, result: [] Quota exceeded False upsert #193 ok, result: [] Quota exceeded False upsert #194 ok, result: [] Quota exceeded False upsert #195 ok, result: [] Quota exceeded False upsert #196 ok, result: [] Quota exceeded False upsert #197 ok, result: [] Quota exceeded False upsert #198 ok, result: [] Quota exceeded False upsert #199 ok, result: [] Quota exceeded False upsert #200 ok, result: [] Quota exceeded False upsert #201 
ok, result: [] Quota exceeded False upsert #202 ok, result: [] Quota exceeded False upsert #203 ok, result: [] Quota exceeded False upsert #204 ok, result: [] Quota exceeded False upsert #205 ok, result: [] Quota exceeded False upsert #206 ok, result: [] Quota exceeded False upsert #207 ok, result: [] Quota exceeded False upsert #208 ok, result: [] Quota exceeded False upsert #209 ok, result: [] Quota exceeded False upsert #210 ok, result: [] Quota exceeded False upsert #211 ok, result: [] Quota exceeded False upsert #212 ok, result: [] Quota exceeded False upsert #213 ok, result: [] Quota exceeded False upsert #214 ok, result: [] Quota exceeded False upsert #215 ok, result: [] Quota exceeded False upsert #216 ok, result: [] Quota exceeded False upsert #217 ok, result: [] Quota exceeded False upsert #218 ok, result: [] Quota exceeded False upsert #219 ok, result: [] Quota exceeded False upsert #220 ok, result: [] Quota exceeded False upsert #221 ok, result: [] Quota exceeded False upsert #222 ok, result: [] Quota exceeded False upsert #223 ok, result: [] Quota exceeded False upsert #224 ok, result: [] Quota exceeded False upsert #225 ok, result: [] Quota exceeded False upsert #226 ok, result: [] Quota exceeded False upsert #227 ok, result: [] Quota exceeded False upsert #228 ok, result: [] Quota exceeded False upsert #229 ok, result: [] Quota exceeded False upsert #230 ok, result: [] Quota exceeded False upsert #231 ok, result: [] Quota exceeded False upsert #232 ok, result: [] Quota exceeded False upsert #233 ok, result: [] Quota exceeded False upsert #234 ok, result: [] Quota exceeded False upsert #235 ok, result: [] Quota exceeded False upsert #236 ok, result: [] Quota exceeded False upsert #237 ok, result: [] Quota exceeded False upsert #238 ok, result: [] Quota exceeded False upsert #239 ok, result: [] Quota exceeded False upsert #240 ok, result: [] Quota exceeded False upsert #241 ok, result: [] Quota exceeded False upsert #242 ok, result: [] Quota exceeded False upsert #243 ok, result: [] Quota exceeded False upsert #244 ok, result: [] Quota exceeded False upsert #245 ok, result: [] Quota exceeded False upsert #246 ok, result: [] Quota exceeded False upsert #247 ok, result: [] Quota exceeded False upsert #248 ok, result: [] Quota exceeded False upsert #249 ok, result: [] Quota exceeded False upsert #250 ok, result: [] Quota exceeded False upsert #251 ok, result: [] Quota exceeded False upsert #252 ok, result: [] Quota exceeded False upsert #253 ok, result: [] Quota exceeded False upsert: got overload issue |97.4%| [TA] $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |97.9%| [TA] {RESULT} $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test 2025-05-05 03:57:27,448 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-05 03:57:27,510 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 1761679 168M 170M 113M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/4zc4/00004f/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mod 1764323 944M 806M 681M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/4zc4/00004f/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk 1771195 117M 114M 89.4M └─ moto_server s3 --port 27015 Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/4zc4/00004f/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/4zc4/00004f/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in 
call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test if not self.wait_for( File "ydb/tests/olap/ttl_tiering/base.py", line 88, in wait_for time.sleep(1) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/4zc4/00004f/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/4zc4/00004f/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/4zc4/00004f', '--source-root', '/home/runner/.ya/build/build_root/4zc4/00004f/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/4zc4/00004f/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', 
'--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/4zc4/00004f/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/4zc4/00004f/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/4zc4/00004f', '--source-root', '/home/runner/.ya/build/build_root/4zc4/00004f/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/4zc4/00004f/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) |98.7%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test >> test_alter_tiering.py::TestAlterTiering::test[many_tables] 2025-05-05 03:57:29,642 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-05 03:57:29,794 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
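When the 600-second limit is hit, the wrapper prints the surviving process tree (the test binary, ydbd, moto_server) before terminating it, as in the listing that follows. A minimal sketch of that kind of cleanup, assuming psutil is available; this is not necessarily how the ya test tool itself implements it:

    import psutil

    def terminate_tree(root_pid: int, grace_seconds: float = 10.0) -> None:
        """Terminate a process and all of its children, escalating to SIGKILL."""
        root = psutil.Process(root_pid)
        procs = root.children(recursive=True) + [root]
        for p in procs:
            try:
                p.terminate()              # polite SIGTERM first
            except psutil.NoSuchProcess:
                pass                       # already gone
        _, alive = psutil.wait_procs(procs, timeout=grace_seconds)
        for p in alive:
            p.kill()                       # escalate for stragglers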
Process tree before termination: pid rss ref pdirt 1763997 3.0G 3.0G 3.0G ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/4zc4/00003a/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module 1786925 1.4G 1.4G 1.2G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/4zc4/00003a/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alt Test command err: test_suffix, num 0, table path read_data_during_bulk_upsert0 start_time 1746416854.0883336 test_suffix, num 1, table path read_data_during_bulk_upsert1 start_time 1746416854.088665 test_suffix, num 2, table path read_data_during_bulk_upsert2 start_time 1746416854.0898957 test_suffix, num 3, table path read_data_during_bulk_upsert3 start_time 1746416854.0911436 test_suffix, num 5, table path read_data_during_bulk_upsert5 start_time 1746416854.0928476 test_suffix, num 4, table path read_data_during_bulk_upsert4 start_time 1746416854.0914524 test_suffix, num 6, table path read_data_during_bulk_upsert6 start_time 1746416854.09481 test_suffix, num 7, table path read_data_during_bulk_upsert7 start_time 1746416854.0954537 test_suffix, num 8, table path read_data_during_bulk_upsert8 start_time 1746416854.0988941 test_suffix, num 9, table path read_data_during_bulk_upsert9 start_time 1746416854.1012723 Path read_data_during_bulk_upsert0 removed Path read_data_during_bulk_upsert1 removed Path read_data_during_bulk_upsert2 removed Path read_data_during_bulk_upsert3 removed Path read_data_during_bulk_upsert5 removed Path read_data_during_bulk_upsert4 removed Path read_data_during_bulk_upsert7 removed Path read_data_during_bulk_upsert6 removed Path read_data_during_bulk_upsert9 removed Path read_data_during_bulk_upsert8 removed Path read_data_during_bulk_upsert2 removed Path read_data_during_bulk_upsert7 removed Path read_data_during_bulk_upsert3 removed Path read_data_during_bulk_upsert0 removed Path read_data_during_bulk_upsert6 removed Path read_data_during_bulk_upsert5 removed Path read_data_during_bulk_upsert1 removed Path read_data_during_bulk_upsert4 removed Path read_data_during_bulk_upsert9 removed Path read_data_during_bulk_upsert8 removed test_suffix, num 0, table path read_data_during_bulk_upsert start_time 1746416974.2300134 Path read_data_during_bulk_upsert removed Path read_data_during_bulk_upsert removed test_suffix, num 0, table path alter_compression start_time 1746417114.3086581 Path alter_compression removed Path alter_compression removed test_suffix, num 0, table path read_update_write_load start_time 1746417299.9368312 Path read_update_write_load removed Was written: 0.0 MiB, Speed: 0.0 MiB/s Step 1. 
only write Write: 10% 3399 30% 3399 50% 3399 90% 3399 99% 3399 ms Write: 10% 6156 30% 6156 50% 6156 90% 6156 99% 6156 ms Write: 10% 6690 30% 6690 50% 6690 90% 6690 99% 6690 ms Write: 10% 6485 30% 6485 50% 6485 90% 6485 99% 6485 ms Write: 10% 6762 30% 6762 50% 6762 90% 6762 99% 6762 ms Write: 10% 6281 30% 6281 50% 6281 90% 6281 99% 6281 ms Write: 10% 6139 30% 6139 50% 6139 90% 6139 99% 6139 ms Write: 10% 6392 30% 6392 50% 6392 90% 6392 99% 6392 ms Write: 10% 5910 30% 5910 50% 5910 90% 5910 99% 5910 ms Write: 10% 5856 30% 5856 50% 5856 90% 5856 99% 5856 ms Write: 10% 5892 30% 5892 50% 5892 90% 5892 99% 5892 ms Write: 10% 5801 30% 5801 50% 5801 90% 5801 99% 5801 ms Write: 10% 5732 30% 5732 50% 5732 90% 5732 99% 5732 ms Write: 10% 5656 30% 5656 50% 5656 90% 5656 99% 5656 ms Write: 10% 5447 30% 5447 50% 5447 90% 5447 99% 5447 ms Write: 10% 5436 30% 5436 50% 5436 90% 5436 99% 5436 ms Write: 10% 5384 30% 5384 50% 5384 90% 5384 99% 5384 ms Write: 10% 5112 30% 5112 50% 5112 90% 5112 99% 5112 ms Write: 10% 5030 30% 5030 50% 5030 90% 5030 99% 5030 ms Write: 10% 5051 30% 5051 50% 5051 90% 5051 99% 5051 ms Write: 10% 4871 30% 4871 50% 4871 90% 4871 99% 4871 ms Write: 10% 4867 30% 4867 50% 4867 90% 4867 99% 4867 ms Write: 10% 4856 30% 4856 50% 4856 90% 4856 99% 4856 ms Write: 10% 4970 30% 4970 50% 4970 90% 4970 99% 4970 ms Write: 10% 4150 30% 4150 50% 4150 90% 4150 99% 4150 ms Write: 10% 4375 30% 4375 50% 4375 90% 4375 99% 4375 ms Write: 10% 4086 30% 4086 50% 4086 90% 4086 99% 4086 ms Write: 10% 4921 30% 4921 50% 4921 90% 4921 99% 4921 ms Write: 10% 3497 30% 3497 50% 3497 90% 3497 99% 3497 ms Write: 10% 3786 30% 3786 50% 3786 90% 3786 99% 3786 ms Write: 10% 4326 30% 4326 50% 4326 90% 4326 99% 4326 ms Write: 10% 3779 30% 3779 50% 3779 90% 3779 99% 3779 ms Write: 10% 3582 30% 3582 50% 3582 90% 3582 99% 3582 ms Write: 10% 3584 30% 3584 50% 3584 90% 3584 99% 3584 ms Write: 10% 3220 30% 3220 50% 3220 90% 3220 99% 3220 ms Write: 10% 2931 30% 2931 50% 2931 90% 2931 99% 2931 ms Write: 10% 3417 30% 3417 50% 3417 90% 3417 99% 3417 ms Write: 10% 2362 30% 2362 50% 2362 90% 2362 99% 2362 ms Write: 10% 3037 30% 3037 50% 3037 90% 3037 99% 3037 ms Write: 10% 2988 30% 2988 50% 2988 90% 2988 99% 2988 ms Write: 10% 2614 30% 2614 50% 2614 90% 2614 99% 2614 ms Write: 10% 2525 30% 2525 50% 2525 90% 2525 99% 2525 ms Write: 10% 2817 30% 2817 50% 2817 90% 2817 99% 2817 ms Write: 10% 2675 30% 2675 50% 2675 90% 2675 99% 2675 ms Write: 10% 1854 30% 1854 50% 1854 90% 1854 99% 1854 ms Write: 10% 1890 30% 1890 50% 1890 90% 1890 99% 1890 ms Write: 10% 1938 30% 1938 50% 1938 90% 1938 99% 1938 ms Write: 10% 1809 30% 1809 50% 1809 90% 1809 99% 1809 ms Write: 10% 2106 30% 2106 50% 2106 90% 2106 99% 2106 ms Write: 10% 2137 30% 2137 50% 2137 90% 2137 99% 2137 ms Write: 10% 1631 30% 1631 50% 1631 90% 1631 99% 1631 ms Write: 10% 1396 30% 1396 50% 1396 90% 1396 99% 1396 ms Write: 10% 1602 30% 1602 50% 1602 90% 1602 99% 1602 ms Write: 10% 1308 30% 1308 50% 1308 90% 1308 99% 1308 ms Write: 10% 1260 30% 1260 50% 1260 90% 1260 99% 1260 ms Write: 10% 1278 30% 1278 50% 1278 90% 1278 99% 1278 ms Write: 10% 1292 30% 1292 50% 1292 90% 1292 99% 1292 ms Write: 10% 1271 30% 1271 50% 1271 90% 1271 99% 1271 ms Write: 10% 1573 30% 1573 50% 1573 90% 1573 99% 1573 ms Write: 10% 1354 30% 1354 50% 1354 90% 1354 99% 1354 ms Write: 10% 2729 30% 2729 50% 2729 90% 2729 99% 2729 ms Write: 10% 1601 30% 1601 50% 1601 90% 1601 99% 1601 ms Write: 10% 1300 30% 1300 50% 1300 90% 1300 99% 1300 ms Write: 10% 1656 30% 1656 50% 1656 90% 1656 99% 1656 ms Step 2. 
read write Write: 10% 6472 30% 6472 50% 6472 90% 6472 99% 6472 ms Write: 10% 6428 30% 6428 50% 6428 90% 6428 99% 6428 ms Write: 10% 6658 30% 6658 50% 6658 90% 6658 99% 6658 ms Write: 10% 6619 30% 6619 50% 6619 90% 6619 99% 6619 ms Write: 10% 6513 30% 6513 50% 6513 90% 6513 99% 6513 ms Write: 10% 6339 30% 6339 50% 6339 90% 6339 99% 6339 ms Write: 10% 6363 30% 6363 50% 6363 90% 6363 99% 6363 ms Write: 10% 6298 30% 6298 50% 6298 90% 6298 99% 6298 ms Write: 10% 5898 30% 5898 50% 5898 90% 5898 99% 5898 ms Write: 10% 5520 30% 5520 50% 5520 90% 5520 99% 5520 ms Write: 10% 5843 30% 5843 50% 5843 90% 5843 99% 5843 ms Write: 10% 5501 30% 5501 50% 5501 90% 5501 99% 5501 ms Write: 10% 5496 30% 5496 50% 5496 90% 5496 99% 5496 ms Write: 10% 5296 30% 5296 50% 5296 90% 5296 99% 5296 ms Write: 10% 5304 30% 5304 50% 5304 90% 5304 99% 5304 ms Write: 10% 5064 30% 5064 50% 5064 90% 5064 99% 5064 ms Write: 10% 5302 30% 5302 50% 5302 90% 5302 99% 5302 ms Write: 10% 4970 30% 4970 50% 4970 90% 4970 99% 4970 ms Write: 10% 5171 30% 5171 50% 5171 90% 5171 99% 5171 ms Write: 10% 4900 30% 4900 50% 4900 90% 4900 99% 4900 ms Write: 10% 4995 30% 4995 50% 4995 90% 4995 99% 4995 ms Write: 10% 4863 30% 4863 50% 4863 90% 4863 99% 4863 ms Write: 10% 4734 30% 4734 50% 4734 90% 4734 99% 4734 ms Write: 10% 4934 30% 4934 50% 4934 90% 4934 99% 4934 ms Write: 10% 4523 30% 4523 50% 4523 90% 4523 99% 4523 ms Write: 10% 4276 30% 4276 50% 4276 90% 4276 99% 4276 ms Write: 10% 4527 30% 4527 50% 4527 90% 4527 99% 4527 ms Write: 10% 4190 30% 4190 50% 4190 90% 4190 99% 4190 ms Write: 10% 4238 30% 4238 50% 4238 90% 4238 99% 4238 ms Write: 10% 4129 30% 4129 50% 4129 90% 4129 99% 4129 ms Write: 10% 4170 30% 4170 50% 4170 90% 4170 99% 4170 ms Write: 10% 4068 30% 4068 50% 4068 90% 4068 99% 4068 ms Write: 10% 3964 30% 3964 50% 3964 90% 3964 99% 3964 ms Write: 10% 3831 30% 3831 50% 3831 90% 3831 99% 3831 ms Write: 10% 3855 30% 3855 50% 3855 90% 3855 99% 3855 ms Write: 10% 3690 30% 3690 50% 3690 90% 3690 99% 3690 ms Write: 10% 3639 30% 3639 50% 3639 90% 3639 99% 3639 ms Write: 10% 3443 30% 3443 50% 3443 90% 3443 99% 3443 ms Write: 10% 2746 30% 2746 50% 2746 90% 2746 99% 2746 ms Write: 10% 3043 30% 3043 50% 3043 90% 3043 99% 3043 ms Write: 10% 3417 30% 3417 50% 3417 90% 3417 99% 3417 ms Write: 10% 3059 30% 3059 50% 3059 90% 3059 99% 3059 ms Write: 10% 2620 30% 2620 50% 2620 90% 2620 99% 2620 ms Write: 10% 2566 30% 2566 50% 2566 90% 2566 99% 2566 ms Write: 10% 2538 30% 2538 50% 2538 90% 2538 99% 2538 ms Write: 10% 2253 30% 2253 50% 2253 90% 2253 99% 2253 ms Write: 10% 2457 30% 2457 50% 2457 90% 2457 99% 2457 ms Write: 10% 2382 30% 2382 50% 2382 90% 2382 99% 2382 ms Write: 10% 2441 30% 2441 50% 2441 90% 2441 99% 2441 ms Write: 10% 2330 30% 2330 50% 2330 90% 2330 99% 2330 ms Write: 10% 2312 30% 2312 50% 2312 90% 2312 99% 2312 ms Write: 10% 2233 30% 2233 50% 2233 90% 2233 99% 2233 ms Write: 10% 1856 30% 1856 50% 1856 90% 1856 99% 1856 ms Write: 10% 1755 30% 1755 50% 1755 90% 1755 99% 1755 ms Write: 10% 1265 30% 1265 50% 1265 90% 1265 99% 1265 ms Write: 10% 1661 30% 1661 50% 1661 90% 1661 99% 1661 ms Write: 10% 1446 30% 1446 50% 1446 90% 1446 99% 1446 ms Write: 10% 1408 30% 1408 50% 1408 90% 1408 99% 1408 ms Write: 10% 1298 30% 1298 50% 1298 90% 1298 99% 1298 ms Write: 10% 1524 30% 1524 50% 1524 90% 1524 99% 1524 ms Write: 10% 1173 30% 1173 50% 1173 90% 1173 99% 1173 ms Write: 10% 1093 30% 1093 50% 1093 90% 1093 99% 1093 ms Write: 10% 1115 30% 1115 50% 1115 90% 1115 99% 1115 ms Write: 10% 1139 30% 1139 50% 1139 90% 1139 99% 1139 ms Read: 10% 10691 30% 
10691 50% 10691 90% 10691 99% 10691 ms Step 3. write modify Write: 10% 856 30% 856 50% 856 90% 856 99% 856 ms Write: 10% 2409 30% 2409 50% 2409 90% 2409 99% 2409 ms Write: 10% 3773 30% 3773 50% 3773 90% 3773 99% 3773 ms Write: 10% 4138 30% 4138 50% 4138 90% 4138 99% 4138 ms Write: 10% 6238 30% 6238 50% 6238 90% 6238 99% 6238 ms Write: 10% 7045 30% 7045 50% 7045 90% 7045 99% 7045 ms Write: 10% 7065 30% 7065 50% 7065 90% 7065 99% 7065 ms Write: 10% 7315 30% 7315 50% 7315 90% 7315 99% 7315 ms Write: 10% 7766 30% 7766 50% 7766 90% 7766 99% 7766 ms Write: 10% 7478 30% 7478 50% 7478 90% 7478 99% 7478 ms Write: 10% 7478 30% 7478 50% 7478 90% 7478 99% 7478 ms Write: 10% 7172 30% 7172 50% 7172 90% 7172 99% 7172 ms Write: 10% 7237 30% 7237 50% 7237 90% 7237 99% 7237 ms Write: 10% 7305 30% 7305 50% 7305 90% 7305 99% 7305 ms Write: 10% 7155 30% 7155 50% 7155 90% 7155 99% 7155 ms Write: 10% 6766 30% 6766 50% 6766 90% 6766 99% 6766 ms Write: 10% 6881 30% 6881 50% 6881 90% 6881 99% 6881 ms Write: 10% 6892 30% 6892 50% 6892 90% 6892 99% 6892 ms Write: 10% 6566 30% 6566 50% 6566 90% 6566 99% 6566 ms Write: 10% 6792 30% 6792 50% 6792 90% 6792 99% 6792 ms Write: 10% 6313 30% 6313 50% 6313 90% 6313 99% 6313 ms Write: 10% 7108 30% 7108 50% 7108 90% 7108 99% 7108 ms Write: 10% 6228 30% 6228 50% 6228 90% 6228 99% 6228 ms Write: 10% 6094 30% 6094 50% 6094 90% 6094 99% 6094 ms Write: 10% 5856 30% 5856 50% 5856 90% 5856 99% 5856 ms Write: 10% 5891 30% 5891 50% 5891 90% 5891 99% 5891 ms Write: 10% 6167 30% 6167 50% 6167 90% 6167 99% 6167 ms Write: 10% 6314 30% 6314 50% 6314 90% 6314 99% 6314 ms Write: 10% 5507 30% 5507 50% 5507 90% 5507 99% 5507 ms Write: 10% 5838 30% 5838 50% 5838 90% 5838 99% 5838 ms Write: 10% 5440 30% 5440 50% 5440 90% 5440 99% 5440 ms Write: 10% 5324 30% 5324 50% 5324 90% 5324 99% 5324 ms Write: 10% 5543 30% 5543 50% 5543 90% 5543 99% 5543 ms Write: 10% 5736 30% 5736 50% 5736 90% 5736 99% 5736 ms Write: 10% 5589 30% 5589 50% 5589 90% 5589 99% 5589 ms Write: 10% 4887 30% 4887 50% 4887 90% 4887 99% 4887 ms Write: 10% 4605 30% 4605 50% 4605 90% 4605 99% 4605 ms Write: 10% 4709 30% 4709 50% 4709 90% 4709 99% 4709 ms Write: 10% 4663 30% 4663 50% 4663 90% 4663 99% 4663 ms Write: 10% 5147 30% 5147 50% 5147 90% 5147 99% 5147 ms Write: 10% 4686 30% 4686 50% 4686 90% 4686 99% 4686 ms Write: 10% 4737 30% 4737 50% 4737 90% 4737 99% 4737 ms Write: 10% 4800 30% 4800 50% 4800 90% 4800 99% 4800 ms Write: 10% 4313 30% 4313 50% 4313 90% 4313 99% 4313 ms Write: 10% 4292 30% 4292 50% 4292 90% 4292 99% 4292 ms Write: 10% 4119 30% 4119 50% 4119 90% 4119 99% 4119 ms Write: 10% 4027 30% 4027 50% 4027 90% 4027 99% 4027 ms Write: 10% 4566 30% 4 ... 
python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fb02a1fe640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fb02a9ff640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fb02b7ff640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fb02d1ff640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fb02fdff640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in 
ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fb02c9fe640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fb02f5fe640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fb0319ff640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fb0327ff640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fb0347fe640 (most recent call first): File 
"contrib/tools/python3/Lib/threading.py", line 355 in wait File "contrib/python/grpcio/py3/grpc/_channel.py", line 1732 in _close File "contrib/python/grpcio/py3/grpc/_channel.py", line 1750 in close File "contrib/python/ydb/py3/ydb/connection.py", line 514 in destroy File "contrib/python/ydb/py3/ydb/connection.py", line 510 in close File "contrib/python/ydb/py3/ydb/connection.py", line 480 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007fb05785c440 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 1624 in _shutdown Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/4zc4/00003a/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/4zc4/00003a/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/4zc4/00003a/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/4zc4/00003a', '--source-root', '/home/runner/.ya/build/build_root/4zc4/00003a/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/4zc4/00003a/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/4zc4/00003a/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/4zc4/00003a/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', 
'/home/runner/.ya/build/build_root/4zc4/00003a/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/4zc4/00003a', '--source-root', '/home/runner/.ya/build/build_root/4zc4/00003a/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/4zc4/00003a/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) 2025-05-05 03:58:00,322 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-05-05 03:58:00,322 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores |99.6%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TM] {RESULT} ydb/tests/olap/scenario/py3test |99.9%| CLEANING BUILD ROOT ydb/library/yaml_config/ut_transform [size:medium] ------ sole chunk ran 1 test (total:2.74s - test:2.12s canon:0.33s) [fail] test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [default-linux-x86_64-relwithdebinfo] (0.84s) Test results differ from canonical: test_result[3]: files content differs: 'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1 stdout: stderr: Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff/test_transform.py.TestYamlConfigTransformations.test_basic.args1-dump_ds_init.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/library/yaml_config/ut_transform ydb/tests/functional/hive [size:medium] nchunks:20 ------ [test_drain.py 0/20] chunk ran 1 test (total:15.70s - test:15.60s) [fail] test_drain.py::TestHive::test_drain_on_stop [default-linux-x86_64-relwithdebinfo] (13.52s) ydb/tests/functional/hive/test_drain.py:93: in test_drain_on_stop wait_tablets_are_active( ydb/tests/library/common/delayed.py:151: in wait_tablets_are_active predicate(raise_error=True) ydb/tests/library/common/delayed.py:141: in predicate raise AssertionError( E AssertionError: E ############################## E 0 seconds passed, 102 tablet(s) are not active. Inactive tablets are (first 10 entries): (72075186224037955: 4) (72075186224038021: 4) (72075186224038341: 4) (72075186224038349: 4) (72075186224038364: None) (72075186224038365: 4) (72075186224038372: None) (72075186224038373: 4) (72075186224038388: None) (72075186224038389: 4). 
Additional info is empty E ############################## Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff/test_drain.py.TestHive.test_drain_on_stop.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/functional/hive ydb/tests/functional/serverless [size:medium] nchunks:10 ------ [test_serverless.py 0/10] chunk ran 1 test (total:126.83s - test:125.38s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (122.83s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. 
Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ [test_serverless.py 1/10] chunk ran 1 test (total:113.72s - test:112.16s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (108.83s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." 
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ FAIL: 2 - FAIL ydb/tests/functional/serverless ydb/tests/functional/tenants [size:medium] nchunks:20 ------ [0/20] chunk ran 1 test (total:16.17s - test:16.09s) [fail] test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (13.55s) ydb/tests/functional/tenants/test_dynamic_tenants.py:350: in test_create_and_drop_the_same_tenant2 pool.retry_operation_sync(write_some_data, None, "table", "table_for_rm", value) contrib/python/ydb/py3/ydb/table.py:2632: in retry_operation_sync return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/table.py:2630: in wrapped_callee return callee(session, *args, **kwargs) ydb/tests/functional/tenants/test_dynamic_tenants.py:340: in write_some_data session.transaction().execute( contrib/python/ydb/py3/ydb/table.py:2369: in execute return self._driver( contrib/python/ydb/py3/ydb/tracing.py:70: in wrapper return f(self, *args, **kwargs) contrib/python/ydb/py3/ydb/pool.py:443: in __call__ res = connection( contrib/python/ydb/py3/ydb/connection.py:465: in __call__ return response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.SchemeError: message: "Failed to create default pool in database /Root/users/database" severity: 1 issues { message: "Failed to create resource pool: StatusPathDoesNotExist" severity: 1 issues { message: "Path does not exist" issue_code: 200200 severity: 1 } } ,message: "Query 
failed during adding/waiting in workload pool " severity: 1 (server_code: 400070) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [1/20] chunk ran 1 test (total:17.21s - test:17.10s) [fail] test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (13.86s) ydb/tests/functional/tenants/test_dynamic_tenants.py:350: in test_create_and_drop_the_same_tenant2 pool.retry_operation_sync(write_some_data, None, "table", "table_for_rm", value) contrib/python/ydb/py3/ydb/table.py:2632: in retry_operation_sync return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/table.py:2630: in wrapped_callee return callee(session, *args, **kwargs) ydb/tests/functional/tenants/test_dynamic_tenants.py:340: in write_some_data session.transaction().execute( contrib/python/ydb/py3/ydb/table.py:2369: in execute return self._driver( contrib/python/ydb/py3/ydb/tracing.py:70: in wrapper return f(self, *args, **kwargs) contrib/python/ydb/py3/ydb/pool.py:443: in __call__ res = connection( contrib/python/ydb/py3/ydb/connection.py:465: in __call__ return response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.SchemeError: message: "Failed to create default pool in database /Root/users/database" severity: 1 issues { message: "Failed to create resource pool: StatusPathDoesNotExist" severity: 1 issues { message: "Path does not exist" issue_code: 200200 severity: 1 } } ,message: "Query failed during adding/waiting in workload pool " severity: 1 (server_code: 400070) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [2/20] chunk ran 1 test (total:16.37s - test:16.31s) [fail] test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (14.16s) ydb/tests/functional/tenants/test_tenants.py:433: in test_list_database_above assert result.children[0].name == ".sys" E AssertionError: assert 
'.metadata' == '.sys' E - .sys E + .metadata Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_list_database_above.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [3/20] chunk ran 1 test (total:16.17s - test:16.09s) [fail] test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (14.16s) ydb/tests/functional/tenants/test_tenants.py:433: in test_list_database_above assert result.children[0].name == ".sys" E AssertionError: assert '.metadata' == '.sys' E - .sys E + .metadata Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_list_database_above.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [4/20] chunk ran 1 test (total:71.17s - test:71.11s) [fail] test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (68.61s) ydb/tests/functional/tenants/test_tenants.py:297: in test_stop_start with ydb_database_ctx(ydb_cluster, database_path): contrib/tools/python3/Lib/contextlib.py:137: in __enter__ return next(self.gen) ydb/tests/library/fixtures/__init__.py:88: in ydb_database_ctx ydb_cluster.create_database(database_path, storage_pool_units_count=storage_pools, timeout_seconds=timeout_seconds, token=token) ydb/tests/library/harness/kikimr_cluster_interface.py:217: in create_database raise RuntimeError('create_database failed: %s, %s' % (operation.status, ydb.issues._format_issues(operation.issues))) E RuntimeError: create_database failed: 400080, message: "Group fit error BoxId# 1 StoragePoolId# 5 Error# failed to allocate group: no group options PDisks# {[(1:1-S)]}" severity: 1 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [5/20] chunk ran 1 test (total:76.56s - test:76.52s) [fail] test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (73.67s) ydb/tests/functional/tenants/test_tenants.py:297: in test_stop_start with ydb_database_ctx(ydb_cluster, database_path): contrib/tools/python3/Lib/contextlib.py:137: in __enter__ return next(self.gen) ydb/tests/library/fixtures/__init__.py:88: in ydb_database_ctx ydb_cluster.create_database(database_path, storage_pool_units_count=storage_pools, timeout_seconds=timeout_seconds, token=token) ydb/tests/library/harness/kikimr_cluster_interface.py:217: in create_database raise RuntimeError('create_database failed: %s, %s' % (operation.status, ydb.issues._format_issues(operation.issues))) E RuntimeError: create_database failed: 400080, message: "Group fit error BoxId# 1 StoragePoolId# 5 Error# failed to allocate group: no group options PDisks# {[(1:1-S)]}" severity: 1 Log: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--true.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff
------ FAIL: 6 - FAIL ydb/tests/functional/tenants
ydb/tests/olap/data_quotas [size:medium] nchunks:10
------ [2/10] chunk ran 1 test (total:219.77s - test:219.65s)
[fail] test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [default-linux-x86_64-relwithdebinfo] (217.22s)
ydb/tests/olap/data_quotas/test_quota_exhaustion.py:236: in test_duplicates
    self.upsert_until_overload(lambda i: self.upsert_test_chunk(session, table_path, 0, retries=0), timeout_seconds=200)
ydb/tests/olap/data_quotas/test_quota_exhaustion.py:83: in upsert_until_overload
    assert time.time() <= deadline, "deadline exceeded"
E   AssertionError: deadline exceeded
E   assert 1746417053.8658574 <= 1746417053.8550928
E    +  where 1746417053.8658574 = <built-in function time>()
E    +    where <built-in function time> = time.time
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/test_quota_exhaustion.py.TestYdbWorkload.test_duplicates.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff
------ FAIL: 2 - GOOD, 1 - FAIL ydb/tests/olap/data_quotas
ydb/tests/olap/scenario [size:medium]
------ sole chunk ran 18 tests (total:632.35s - test:600.01s)
Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal
List of the tests involved in the launch:
test_alter_compression.py::TestAlterCompression::test[alter_compression] (good) duration: 183.73s
test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (good) duration: 138.08s
test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] (good) duration: 122.47s
test_alter_tiering.py::TestAlterTiering::test[many_tables] (timeout) duration: 105.72s
test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] (good) duration: 55.51s
test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] (good) duration: 15.20s
test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] (good) duration: 2.00s
test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] (good) duration: 2.00s
test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] (good) duration: 1.95s
test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] (good) duration: 1.89s
8 tests were not launched inside chunk.
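Note on the data_quotas failure above: it is a wall-clock deadline assertion inside a write-until-overload loop (test_quota_exhaustion.py:83). A minimal sketch of that pattern, with names and structure assumed from the traceback rather than taken from the repository:

    import time

    def upsert_until_overload(do_upsert, timeout_seconds=200):
        # Keep issuing upserts until the database rejects one (quota/overload),
        # but fail if that does not happen before the wall-clock deadline.
        deadline = time.time() + timeout_seconds
        i = 0
        while True:
            try:
                do_upsert(i)          # assumed callback: one upsert attempt
            except Exception:         # the real test would catch a specific "overloaded" error
                return i              # overload reached within the budget
            i += 1
            assert time.time() <= deadline, "deadline exceeded"

In the run above the overload never arrived within the 200-second budget, so the assertion fired roughly 10 ms past the deadline.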
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [timeout] test_alter_tiering.py::TestAlterTiering::test[many_tables] [default-linux-x86_64-relwithdebinfo] (105.72s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_tiering.py.TestAlterTiering.test.many_tables.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff ------ TIMEOUT: 9 - GOOD, 8 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/scenario ydb/tests/olap/ttl_tiering [size:medium] nchunks:10 ------ [1/10] chunk ran 1 test (total:609.22s - test:600.07s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 607.09s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-relwithdebinfo] (607.09s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ TIMEOUT: 7 - GOOD, 1 - TIMEOUT ydb/tests/olap/ttl_tiering ------ sole chunk ran 1 test (total:244.22s - test:243.24s) Info: Test run has exceeded 32.0G (33554432K) memory limit with 90.7G (95062160K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1763386 44.8M 44.8M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1764223 32.5M 20.8M 8.3M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1764291 270M 273M 220M └─ ydb-tests-stress-olap_workload-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:f 1766297 10.5G 10.4G 10.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1766300 10.1G 9.8G 9.9G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1766302 10.3G 10.0G 10.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1766304 10.5G 10.4G 10.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1766306 10.1G 9.9G 9.9G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1766308 10.3G 10.1G 10.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1766346 10.0G 9.8G 9.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1766348 10.0G 9.8G 9.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 1766350 10.3G 10.0G 10.0G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/stderr ------ sole chunk ran 2 tests (total:253.60s - test:251.01s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 10.6G (11135136K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1763172 44.8M 43.6M 6.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1763880 32.7M 20.5M 8.3M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1763996 1.4G 1.1G 1.1G └─ ydb-tests-stress-simple_queue-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:fa 1766278 1.1G 982M 822M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1766299 1.0G 967M 809M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1766301 1022M 972M 798M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1766303 1.1G 1018M 849M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1766305 1.0G 910M 838M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1766307 1.0G 873M 813M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1766312 1.3G 1.1G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1766347 1.1G 968M 872M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 1766349 1.2G 1.1G 962M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_out_stuff/stderr Total 10 suites: 3 - GOOD 5 - FAIL 2 - TIMEOUT Total 43 tests: 22 - GOOD 11 - FAIL 8 - NOT_LAUNCHED 2 - TIMEOUT Cache efficiency ratio is 99.66% (35511 of 35631). Local: 40 (0.11%), dist: 0 (0.00%), by dynamic uids: 0 (0.00%), avoided: 35471 (99.55%) Dist cache download: count=0, size=0 bytes, speed=0.0 bytes/s Disk usage for tools/sdk 3.33 GiB Additional disk space consumed for build cache 0 bytes Critical path: [633251 ms] [TM] [rnd-mi9k46fwamlwtdkr default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/scenario/py3test [started: 0 (1746416849283), finished: 633251 (1746417482534)] Time from start: 651702.7111816406 ms, time elapsed by graph 633251 ms, time diff 18451.711181640625 ms. 
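Note: the headline figures in the summary above are simple ratios and differences; a quick check with the values quoted in the report:

    # Values copied from the summary above.
    local, avoided, total = 40, 35471, 35631
    print(f"cache efficiency: {(local + avoided) / total:.2%}")        # ~99.66%, i.e. "35511 of 35631"

    time_from_start_ms = 651702.7111816406
    graph_time_ms = 633251
    print(f"time diff: {time_from_start_ms - graph_time_ms:.3f} ms")   # ~18451.711 ms, matching the critical path line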
The longest 10 tasks:
[633251 ms] [TM] [rnd-mi9k46fwamlwtdkr default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/scenario/py3test [started: 1746416849283, finished: 1746417482534]
[609647 ms] [TM] [rnd-13013293576118712460 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746416846987, finished: 1746417456634]
[571911 ms] [TM] [rnd-13388371783728283559 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/data_quotas/py3test [started: 1746416846428, finished: 1746417418339]
[393371 ms] [TM] [rnd-9593616170385786766 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746416847049, finished: 1746417240420]
[253954 ms] [TM] [rnd-baw1gcq08ygotinv default-linux-x86_64 relwithdebinfo]: ydb/tests/stress/simple_queue/tests/py3test [started: 1746416849258, finished: 1746417103212]
[244645 ms] [TM] [rnd-8e7q90q73q3ks7pv default-linux-x86_64 relwithdebinfo]: ydb/tests/stress/olap_workload/tests/py3test [started: 1746416849352, finished: 1746417093997]
[220220 ms] [TM] [rnd-16867932711069529987 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/data_quotas/py3test [started: 1746416846250, finished: 1746417066470]
[197818 ms] [TM] [rnd-6500226764417088599 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746416846948, finished: 1746417044766]
[160048 ms] [TM] [rnd-9064480291500559776 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746416846845, finished: 1746417006893]
[158546 ms] [TM] [rnd-5037090940617043126 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/data_quotas/py3test [started: 1746416846354, finished: 1746417004900]
Total time by type:
[4561699 ms] [TM] [count: 114, ave time 40014.90 msec]
[  24782 ms] [TA] [count: 6, ave time 4130.33 msec]
[   4582 ms] [prepare:get from local cache] [count: 40, ave time 114.55 msec]
[   4063 ms] [prepare:bazel-store] [count: 1, ave time 4063.00 msec]
[   1636 ms] [prepare:AC] [count: 2, ave time 818.00 msec]
[   1499 ms] [prepare:tools] [count: 12, ave time 124.92 msec]
[    634 ms] [prepare:put to dist cache] [count: 21, ave time 30.19 msec]
[     37 ms] [prepare:clean] [count: 3, ave time 12.33 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 4586481 ms (100.00%)
Total run tasks time - 4586481 ms
Configure time - 20.9 s
Statistics overhead 800 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json
Ok
+ echo 0
+ ./ya make .
-T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build relwithdebinfo -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.jvIwrReDhY --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -X --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out Output root is subdirectory of Arcadia root, this may cause non-idempotent build Configuring dependencies for platform default-linux-x86_64-relwithdebinfo Configuring dependencies for platform tools Configuring dependencies for platform test_tool_tc1-global Configuring tests execution Configuring local and dist store caches Configuration done. Preparing for execution |33.3%| CLEANING SYMRES | 2.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 | 5.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut | 6.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a | 9.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |10.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |10.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |12.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/tools/yqlrun/yqlrun |13.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |15.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |18.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |19.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |19.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |20.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |21.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |25.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |25.5%| [AR] {RESULT} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |27.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut |27.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |28.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |29.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |30.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |30.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |31.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |32.3%| PREPARE $(LLD_ROOT-3808007503) |32.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |33.3%| PREPARE $(CLANG18-1866954364) |33.9%| PREPARE $(YMAKE_PYTHON3-4256832079) |34.4%| PREPARE $(CLANG16-1380963495) |34.9%| PREPARE $(CLANG_FORMAT-1286082657) |35.4%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |35.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |36.5%| [LD] {RESULT} $(B)/ydb/tests/stability/tool/tool |37.0%| PREPARE $(FLAKE8_PY3-715603131) |37.5%| PREPARE $(CLANG-874354456) |38.0%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |38.5%| PREPARE $(PYTHON) |39.1%| PREPARE $(TEST_TOOL_HOST-sbr:8580453620) |39.6%| PREPARE $(GDB) |40.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/tool |40.6%| PREPARE $(CLANG-1922233694) |42.7%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |43.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |43.8%| COMPACTING CACHE 59.6GiB |44.3%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |44.8%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut |45.3%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun |45.8%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |46.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |46.9%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |47.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |47.9%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |48.4%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |49.0%| [LD] {RESULT} $(B)/yql/tools/yqlrun/yqlrun |49.5%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |50.0%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |50.5%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |51.0%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |51.6%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |52.1%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |52.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |53.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |53.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |54.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 >> test_drain.py::TestHive::test_drain_on_stop |54.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] |55.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> tier_delete.py::TestTierDelete::test_delete_s3_ttl |56.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] |57.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> 
test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change >> unstable_connection.py::TestUnstableConnection::test >> data_correctness.py::TestDataCorrectness::test >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |60.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete |60.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] >> ttl_unavailable_s3.py::TestUnavailableS3::test >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |63.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |64.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |64.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |65.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |66.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |69.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |69.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |70.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |70.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |71.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |71.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |72.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> 
test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] |72.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |74.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |76.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_workload.py::TestYdbWorkload::test |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_workload.py::TestYdbWorkload::test[row] |78.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/py3test >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] 2025-05-05 04:10:46,681 ERROR devtools.ya.test.canon.compare: Cannot calculate diff: Traceback (most recent call last): File "devtools/ya/test/canon/compare.py", line 402, in _get_file_diff_via_diff raise Exception( Exception: 'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1 stdout: stderr: |82.8%| [TM] {RESULT} ydb/library/yaml_config/ut_transform/py3test >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [FAIL] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] [FAIL] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] [FAIL] >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] [FAIL] >> test_drain.py::TestHive::test_drain_on_stop [FAIL] |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] [FAIL] |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> 
test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] [FAIL] |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [FAIL] |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] [FAIL] |85.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] |85.9%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |86.5%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} >> test_simple.py::TestSimple::test_multi[alter_table] >> test_simple.py::TestSimple::test_multi[alter_table] [GOOD] >> test_simple.py::TestSimple::test[alter_table] >> test_simple.py::TestSimple::test[alter_table] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test[alter_tablestore] >> test_simple.py::TestSimple::test[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test_multi[table] [GOOD] >> test_simple.py::TestSimple::test[table] >> test_simple.py::TestSimple::test[table] [GOOD] >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] >> test_simple.py::TestSimple::test[tablestores] >> test_simple.py::TestSimple::test[tablestores] [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] [FAIL] >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] [FAIL] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] [FAIL] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] [FAIL] |88.0%| [TA] $(B)/ydb/tests/functional/tenants/test-results/py3test/{meta.json ... results_accumulator.log} |88.5%| [TA] {RESULT} $(B)/ydb/tests/functional/tenants/test-results/py3test/{meta.json ... results_accumulator.log} >> data_correctness.py::TestDataCorrectness::test [GOOD] >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] Test command err: contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead. 
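The test_database_with_disk_quotas failures reported above come down to an exception-matching assertion: as the chunk summaries near the end of this log show, the test wraps the write in pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'), while the server reports the quota breach as "Disk space exhausted ... issue_code: 2033", so the regex never matches. The sketch below reproduces only that assertion pattern; Unavailable and write_key_until_quota_exceeded are hypothetical stand-ins for the real YDB SDK exception and test helper, and the looser final assertion is just one possible way to make the check wording-tolerant, not the project's actual fix.

import re

import pytest


class Unavailable(Exception):
    """Hypothetical stand-in for ydb.issues.Unavailable used in this sketch."""


def write_key_until_quota_exceeded():
    # The real test issues writes through tornado coroutines until the
    # database-level disk quota is exceeded; here we only reproduce the
    # shape of the error the server returned in this run.
    raise Unavailable(
        'message: "Disk space exhausted. Table `/Root/quoted_serverless/.../dirA0/table`." '
        "issue_code: 2033 severity: 1 (server_code: 400050)"
    )


def test_quota_error_is_reported():
    # Matching only the literal status name is brittle: in this run the server
    # said "Disk space exhausted", not DISK_SPACE_EXHAUSTED, so a regex pinned
    # to the latter fails even though the quota limit itself was enforced.
    with pytest.raises(Unavailable) as exc_info:
        write_key_until_quota_exceeded()
    # Accepting either wording (or the stable issue_code) keeps the assertion
    # valid across message rewordings.
    assert re.search(
        r"DISK_SPACE_EXHAUSTED|Disk space exhausted|issue_code: 2033",
        str(exc_info.value),
    )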
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_correctness.py::TestDataCorrectness::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/mmfs/00006e/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/mmfs/00006e/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1795933 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] Test command err: contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead. >> test_workload.py::TestYdbWorkload::test[row] [GOOD] >> test_workload.py::TestYdbWorkload::test[column] |90.6%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |91.1%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD] >> unstable_connection.py::TestUnstableConnection::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/mmfs/00006f/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/mmfs/00006f/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback !!! simulating S3 hang up -- sending SIGSTOP !!! 
simulating S3 recovery -- sending SIGCONT contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1799096 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> unstable_connection.py::TestUnstableConnection::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/mmfs/000073/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/mmfs/000073/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1795978 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/mmfs/000072/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/mmfs/000072/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 
rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {'__DEFAULT': 100000}, portions: 2 contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1795258 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/mmfs/000074/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/mmfs/000074/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1798016 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_workload.py::TestYdbWorkload::test[column] [GOOD] >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [FAIL] >> test_workload.py::TestYdbWorkload::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [FAIL] Test command err: Database name /Root/test upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert #19 ok, result: [] Quota exceeded False upsert #20 ok, result: [] Quota exceeded False upsert #21 ok, result: [] Quota exceeded False upsert #22 ok, result: [] Quota exceeded False upsert #23 ok, result: [] 
Quota exceeded False upsert #24 ok, result: [] Quota exceeded False upsert #25 ok, result: [] Quota exceeded False upsert #26 ok, result: [] Quota exceeded False upsert #27 ok, result: [] Quota exceeded False upsert #28 ok, result: [] Quota exceeded False upsert #29 ok, result: [] Quota exceeded False upsert #30 ok, result: [] Quota exceeded False upsert #31 ok, result: [] Quota exceeded False upsert #32 ok, result: [] Quota exceeded False upsert #33 ok, result: [] Quota exceeded False upsert #34 ok, result: [] Quota exceeded False upsert #35 ok, result: [] Quota exceeded False upsert #36 ok, result: [] Quota exceeded False upsert #37 ok, result: [] Quota exceeded False upsert #38 ok, result: [] Quota exceeded False upsert #39 ok, result: [] Quota exceeded False upsert #40 ok, result: [] Quota exceeded False upsert #41 ok, result: [] Quota exceeded False upsert #42 ok, result: [] Quota exceeded False upsert #43 ok, result: [] Quota exceeded False upsert #44 ok, result: [] Quota exceeded False upsert #45 ok, result: [] Quota exceeded False upsert #46 ok, result: [] Quota exceeded False upsert #47 ok, result: [] Quota exceeded False upsert #48 ok, result: [] Quota exceeded False upsert #49 ok, result: [] Quota exceeded False upsert #50 ok, result: [] Quota exceeded False upsert #51 ok, result: [] Quota exceeded False upsert #52 ok, result: [] Quota exceeded False upsert #53 ok, result: [] Quota exceeded False upsert #54 ok, result: [] Quota exceeded False upsert #55 ok, result: [] Quota exceeded False upsert #56 ok, result: [] Quota exceeded False upsert #57 ok, result: [] Quota exceeded False upsert #58 ok, result: [] Quota exceeded False upsert #59 ok, result: [] Quota exceeded False upsert #60 ok, result: [] Quota exceeded False upsert #61 ok, result: [] Quota exceeded False upsert #62 ok, result: [] Quota exceeded False upsert #63 ok, result: [] Quota exceeded False upsert #64 ok, result: [] Quota exceeded False upsert #65 ok, result: [] Quota exceeded False upsert #66 ok, result: [] Quota exceeded False upsert #67 ok, result: [] Quota exceeded False upsert #68 ok, result: [] Quota exceeded False upsert #69 ok, result: [] Quota exceeded False upsert #70 ok, result: [] Quota exceeded False upsert #71 ok, result: [] Quota exceeded False upsert #72 ok, result: [] Quota exceeded False upsert #73 ok, result: [] Quota exceeded False upsert #74 ok, result: [] Quota exceeded False upsert #75 ok, result: [] Quota exceeded False upsert #76 ok, result: [] Quota exceeded False upsert #77 ok, result: [] Quota exceeded False upsert #78 ok, result: [] Quota exceeded False upsert #79 ok, result: [] Quota exceeded False upsert #80 ok, result: [] Quota exceeded False upsert #81 ok, result: [] Quota exceeded False upsert #82 ok, result: [] Quota exceeded False upsert #83 ok, result: [] Quota exceeded False upsert #84 ok, result: [] Quota exceeded False upsert #85 ok, result: [] Quota exceeded False upsert #86 ok, result: [] Quota exceeded False upsert #87 ok, result: [] Quota exceeded False upsert #88 ok, result: [] Quota exceeded False upsert #89 ok, result: [] Quota exceeded False upsert #90 ok, result: [] Quota exceeded False upsert #91 ok, result: [] Quota exceeded False upsert #92 ok, result: [] Quota exceeded False upsert #93 ok, result: [] Quota exceeded False upsert #94 ok, result: [] Quota exceeded False upsert #95 ok, result: [] Quota exceeded False upsert #96 ok, result: [] Quota exceeded False upsert #97 ok, result: [] Quota exceeded False upsert #98 ok, result: [] Quota exceeded False upsert 
#99 ok, result: [] Quota exceeded False upsert #100 ok, result: [] Quota exceeded False upsert #101 ok, result: [] Quota exceeded False upsert #102 ok, result: [] Quota exceeded False upsert #103 ok, result: [] Quota exceeded False upsert #104 ok, result: [] Quota exceeded False upsert #105 ok, result: [] Quota exceeded False upsert #106 ok, result: [] Quota exceeded False upsert #107 ok, result: [] Quota exceeded False upsert #108 ok, result: [] Quota exceeded False upsert #109 ok, result: [] Quota exceeded False upsert #110 ok, result: [] Quota exceeded False |94.3%| [TA] $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |94.8%| [TA] {RESULT} $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |95.8%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test |96.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/py3test >> test_workload.py::TestYdbWorkload::test[column] [GOOD] |96.9%| [TM] {RESULT} ydb/tests/stress/simple_queue/tests/py3test >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [GOOD] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/mmfs/000075/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/mmfs/000075/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 1795246 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [GOOD] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [GOOD] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test 2025-05-05 04:20:41,143 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-05 04:20:41,202 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 1790564 168M 172M 114M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/mmfs/000070/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mod 1792795 978M 971M 717M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/mmfs/000070/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk 1797470 110M 108M 82.6M └─ moto_server s3 --port 18213 Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/mmfs/000070/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/mmfs/000070/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in 
call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test if not self.wait_for( File "ydb/tests/olap/ttl_tiering/base.py", line 88, in wait_for time.sleep(1) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/mmfs/000070/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/mmfs/000070/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/mmfs/000070', '--source-root', '/home/runner/.ya/build/build_root/mmfs/000070/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/mmfs/000070/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', 
'--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...ttl_tiering/ydb-tests-olap-ttl_tiering', '--basetemp', '/home/runner/.ya/build/build_root/mmfs/000070/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/mmfs/000070/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/mmfs/000070', '--source-root', '/home/runner/.ya/build/build_root/mmfs/000070/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/mmfs/000070/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '1', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test >> test_alter_tiering.py::TestAlterTiering::test[many_tables] 2025-05-05 04:20:44,053 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-05 04:20:44,224 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 1794019 4.1G 4.1G 4.1G ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/mmfs/000020/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module 1814598 1.7G 1.7G 1.5G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/mmfs/000020/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alt Test command err: test_suffix, num 0, table path create_and_drop_tables start_time 1746418248.3114948 Path create_and_drop_tables removed Path create_and_drop_tables removed test_suffix, num 0, table path alter_table start_time 1746418277.6650894 Path alter_table removed Path alter_table removed test_suffix, num 0, table path alter_tablestore start_time 1746418278.3277044 Path alter_tablestore removed Path alter_tablestore removed test_suffix, num 0, table path table start_time 1746418279.115929 Path table removed Path table removed test_suffix, num 0, table path tablestores start_time 1746418279.6305833 Path tablestores removed Path tablestores removed test_suffix, num 0, table path read_update_write_load start_time 1746418309.5629623 Path read_update_write_load removed Was written: 0.0 MiB, Speed: 0.0 MiB/s Step 1. only write Write: 10% 3830 30% 3830 50% 3830 90% 3830 99% 3830 ms Write: 10% 8278 30% 8278 50% 8278 90% 8278 99% 8278 ms Write: 10% 8239 30% 8239 50% 8239 90% 8239 99% 8239 ms Write: 10% 8923 30% 8923 50% 8923 90% 8923 99% 8923 ms Write: 10% 8520 30% 8520 50% 8520 90% 8520 99% 8520 ms Write: 10% 8605 30% 8605 50% 8605 90% 8605 99% 8605 ms Write: 10% 8923 30% 8923 50% 8923 90% 8923 99% 8923 ms Write: 10% 8546 30% 8546 50% 8546 90% 8546 99% 8546 ms Write: 10% 8208 30% 8208 50% 8208 90% 8208 99% 8208 ms Write: 10% 8443 30% 8443 50% 8443 90% 8443 99% 8443 ms Write: 10% 8672 30% 8672 50% 8672 90% 8672 99% 8672 ms Write: 10% 8108 30% 8108 50% 8108 90% 8108 99% 8108 ms Write: 10% 7849 30% 7849 50% 7849 90% 7849 99% 7849 ms Write: 10% 7453 30% 7453 50% 7453 90% 7453 99% 7453 ms Write: 10% 7419 30% 7419 50% 7419 90% 7419 99% 7419 ms Write: 10% 7575 30% 7575 50% 7575 90% 7575 99% 7575 ms Write: 10% 7597 30% 7597 50% 7597 90% 7597 99% 7597 ms Write: 10% 6993 30% 6993 50% 6993 90% 6993 99% 6993 ms Write: 10% 6422 30% 6422 50% 6422 90% 6422 99% 6422 ms Write: 10% 6500 30% 6500 50% 6500 90% 6500 99% 6500 ms Write: 10% 6387 30% 6387 50% 6387 90% 6387 99% 6387 ms Write: 10% 6206 30% 6206 50% 6206 90% 6206 99% 6206 ms Write: 10% 6967 30% 6967 50% 6967 90% 6967 99% 6967 ms Write: 10% 6653 30% 6653 50% 6653 90% 6653 99% 6653 ms Write: 10% 5763 30% 5763 50% 5763 90% 5763 99% 5763 ms Write: 10% 6075 30% 6075 50% 6075 90% 6075 99% 6075 ms Write: 10% 4752 30% 4752 50% 4752 90% 4752 99% 4752 ms Write: 10% 5109 30% 5109 50% 5109 90% 5109 99% 5109 ms Write: 10% 6223 30% 6223 50% 6223 90% 6223 99% 6223 ms Write: 10% 5774 30% 5774 50% 5774 90% 5774 99% 5774 ms Write: 10% 4941 30% 4941 50% 4941 90% 4941 99% 4941 ms Write: 10% 6114 30% 6114 50% 6114 90% 6114 99% 6114 ms Write: 10% 5481 30% 5481 50% 5481 90% 5481 99% 5481 ms Write: 10% 4509 30% 4509 50% 4509 90% 4509 99% 4509 ms Write: 10% 4630 30% 4630 50% 4630 90% 4630 99% 4630 ms Write: 10% 4588 30% 4588 50% 4588 90% 4588 99% 4588 ms Write: 10% 4072 30% 4072 50% 4072 90% 4072 99% 4072 ms Write: 10% 3195 30% 3195 50% 3195 90% 3195 99% 3195 ms Write: 10% 3501 30% 3501 50% 3501 90% 3501 99% 3501 ms Write: 10% 3262 30% 3262 50% 3262 90% 3262 99% 3262 ms Write: 10% 3136 30% 3136 50% 3136 90% 3136 99% 3136 ms Write: 10% 3989 
30% 3989 50% 3989 90% 3989 99% 3989 ms Write: 10% 4120 30% 4120 50% 4120 90% 4120 99% 4120 ms Write: 10% 3993 30% 3993 50% 3993 90% 3993 99% 3993 ms Write: 10% 3136 30% 3136 50% 3136 90% 3136 99% 3136 ms Write: 10% 2649 30% 2649 50% 2649 90% 2649 99% 2649 ms Write: 10% 4218 30% 4218 50% 4218 90% 4218 99% 4218 ms Write: 10% 2416 30% 2416 50% 2416 90% 2416 99% 2416 ms Write: 10% 4547 30% 4547 50% 4547 90% 4547 99% 4547 ms Write: 10% 2293 30% 2293 50% 2293 90% 2293 99% 2293 ms Write: 10% 3122 30% 3122 50% 3122 90% 3122 99% 3122 ms Write: 10% 2752 30% 2752 50% 2752 90% 2752 99% 2752 ms Write: 10% 2364 30% 2364 50% 2364 90% 2364 99% 2364 ms Write: 10% 2839 30% 2839 50% 2839 90% 2839 99% 2839 ms Write: 10% 2705 30% 2705 50% 2705 90% 2705 99% 2705 ms Write: 10% 2486 30% 2486 50% 2486 90% 2486 99% 2486 ms Write: 10% 2664 30% 2664 50% 2664 90% 2664 99% 2664 ms Write: 10% 2376 30% 2376 50% 2376 90% 2376 99% 2376 ms Write: 10% 2227 30% 2227 50% 2227 90% 2227 99% 2227 ms Write: 10% 2348 30% 2348 50% 2348 90% 2348 99% 2348 ms Write: 10% 2476 30% 2476 50% 2476 90% 2476 99% 2476 ms Write: 10% 2276 30% 2276 50% 2276 90% 2276 99% 2276 ms Write: 10% 3082 30% 3082 50% 3082 90% 3082 99% 3082 ms Write: 10% 2704 30% 2704 50% 2704 90% 2704 99% 2704 ms Step 2. read write Write: 10% 925 30% 925 50% 925 90% 925 99% 925 ms Write: 10% 8147 30% 8147 50% 8147 90% 8147 99% 8147 ms Write: 10% 8242 30% 8242 50% 8242 90% 8242 99% 8242 ms Write: 10% 8291 30% 8291 50% 8291 90% 8291 99% 8291 ms Write: 10% 8593 30% 8593 50% 8593 90% 8593 99% 8593 ms Write: 10% 9316 30% 9316 50% 9316 90% 9316 99% 9316 ms Write: 10% 8878 30% 8878 50% 8878 90% 8878 99% 8878 ms Write: 10% 8982 30% 8982 50% 8982 90% 8982 99% 8982 ms Write: 10% 8540 30% 8540 50% 8540 90% 8540 99% 8540 ms Write: 10% 8950 30% 8950 50% 8950 90% 8950 99% 8950 ms Write: 10% 8155 30% 8155 50% 8155 90% 8155 99% 8155 ms Write: 10% 7347 30% 7347 50% 7347 90% 7347 99% 7347 ms Write: 10% 6877 30% 6877 50% 6877 90% 6877 99% 6877 ms Write: 10% 7609 30% 7609 50% 7609 90% 7609 99% 7609 ms Write: 10% 6401 30% 6401 50% 6401 90% 6401 99% 6401 ms Write: 10% 6256 30% 6256 50% 6256 90% 6256 99% 6256 ms Write: 10% 6213 30% 6213 50% 6213 90% 6213 99% 6213 ms Write: 10% 5328 30% 5328 50% 5328 90% 5328 99% 5328 ms Write: 10% 5000 30% 5000 50% 5000 90% 5000 99% 5000 ms Write: 10% 5507 30% 5507 50% 5507 90% 5507 99% 5507 ms Write: 10% 5106 30% 5106 50% 5106 90% 5106 99% 5106 ms Write: 10% 5023 30% 5023 50% 5023 90% 5023 99% 5023 ms Write: 10% 4794 30% 4794 50% 4794 90% 4794 99% 4794 ms Write: 10% 4257 30% 4257 50% 4257 90% 4257 99% 4257 ms Write: 10% 4254 30% 4254 50% 4254 90% 4254 99% 4254 ms Write: 10% 4246 30% 4246 50% 4246 90% 4246 99% 4246 ms Write: 10% 4102 30% 4102 50% 4102 90% 4102 99% 4102 ms Write: 10% 3880 30% 3880 50% 3880 90% 3880 99% 3880 ms Write: 10% 3427 30% 3427 50% 3427 90% 3427 99% 3427 ms Write: 10% 4247 30% 4247 50% 4247 90% 4247 99% 4247 ms Write: 10% 3426 30% 3426 50% 3426 90% 3426 99% 3426 ms Write: 10% 3432 30% 3432 50% 3432 90% 3432 99% 3432 ms Write: 10% 4190 30% 4190 50% 4190 90% 4190 99% 4190 ms Write: 10% 3431 30% 3431 50% 3431 90% 3431 99% 3431 ms Write: 10% 3364 30% 3364 50% 3364 90% 3364 99% 3364 ms Write: 10% 3309 30% 3309 50% 3309 90% 3309 99% 3309 ms Write: 10% 3375 30% 3375 50% 3375 90% 3375 99% 3375 ms Write: 10% 3119 30% 3119 50% 3119 90% 3119 99% 3119 ms Write: 10% 3259 30% 3259 50% 3259 90% 3259 99% 3259 ms Write: 10% 3042 30% 3042 50% 3042 90% 3042 99% 3042 ms Write: 10% 3139 30% 3139 50% 3139 90% 3139 99% 3139 ms Write: 10% 2849 30% 2849 50% 2849 
90% 2849 99% 2849 ms Write: 10% 3459 30% 3459 50% 3459 90% 3459 99% 3459 ms Write: 10% 2944 30% 2944 50% 2944 90% 2944 99% 2944 ms Write: 10% 3093 30% 3093 50% 3093 90% 3093 99% 3093 ms Write: 10% 2699 30% 2699 50% 2699 90% 2699 99% 2699 ms Write: 10% 2545 30% 2545 50% 2545 90% 2545 99% 2545 ms Write: 10% 2738 30% 2738 50% 2738 90% 2738 99% 2738 ms Write: 10% 2613 30% 2613 50% 2613 90% 2613 99% 2613 ms Write: 10% 2499 30% 2499 50% 2499 90% 2499 99% 2499 ms Write: 10% 2801 30% 2801 50% 2801 90% 2801 99% 2801 ms Write: 10% 2306 30% 2306 50% 2306 90% 2306 99% 2306 ms Write: 10% 2352 30% 2352 50% 2352 90% 2352 99% 2352 ms Write: 10% 2820 30% 2820 50% 2820 90% 2820 99% 2820 ms Write: 10% 2130 30% 2130 50% 2130 90% 2130 99% 2130 ms Write: 10% 2260 30% 2260 50% 2260 90% 2260 99% 2260 ms Write: 10% 2871 30% 2871 50% 2871 90% 2871 99% 2871 ms Write: 10% 2440 30% 2440 50% 2440 90% 2440 99% 2440 ms Write: 10% 2058 30% 2058 50% 2058 90% 2058 99% 2058 ms Write: 10% 2099 30% 2099 50% 2099 90% 2099 99% 2099 ms Write: 10% 2474 30% 2474 50% 2474 90% 2474 99% 2474 ms Write: 10% 2105 30% 2105 50% 2105 90% 2105 99% 2105 ms Write: 10% 1985 30% 1985 50% 1985 90% 1985 99% 1985 ms Write: 10% 1801 30% 1801 50% 1801 90% 1801 99% 1801 ms Read: 10% 14830 30% 14830 50% 14830 90% 14830 99% 14830 ms Step 3. write modify Write: 10% 3068 30% 3068 50% 3068 90% 3068 99% 3068 ms Write: 10% 8039 30% 8039 50% 8039 90% 8039 99% 8039 ms Write: 10% 9667 30% 9667 50% 9667 90% 9667 99% 9667 ms Write: 10% 10013 30% 10013 50% 10013 90% 10013 99% 10013 ms Write: 10% 9693 30% 9693 50% 9693 90% 9693 99% 9693 ms Write: 10% 9248 30% 9248 50% 9248 90% 9248 99% 9248 ms Write: 10% 9313 30% 9313 50% 9313 90% 9313 99% 9313 ms Write: 10% 9125 30% 9125 50% 9125 90% 9125 99% 9125 ms Write: 10% 8888 30% 8888 50% 8888 90% 8888 99% 8888 ms Write: 10% 9279 30% 9279 50% 9279 90% 9279 99% 9279 ms Write: 10% 8750 30% 8750 50% 8750 90% 8750 99% 8750 ms Write: 10% 8821 30% 8821 50% 8821 90% 8821 99% 8821 ms Write: 10% 8756 30% 8756 50% 8756 90% 8756 99% 8756 ms Write: 10% 8336 30% 8336 50% 8336 90% 8336 99% 8336 ms Write: 10% 8590 30% 8590 50% 8590 90% 8590 99% 8590 ms Write: 10% 8465 30% 8465 50% 8465 90% 8465 99% 8465 ms Write: 10% 7622 30% 7622 50% 7622 90% 7622 99% 7622 ms Write: 10% 7725 30% 7725 50% 7725 90% 7725 99% 7725 ms Write: 10% 8384 30% 8384 50% 8384 90% 8384 99% 8384 ms Write: 10% 8230 30% 8230 50% 8230 90% 8230 99% 8230 ms Write: 10% 8030 30% 8030 50% 8030 90% 8030 99% 8030 ms Write: 10% 6851 30% 6851 50% 6851 90% 6851 99% 6851 ms Write: 10% 6413 30% 6413 50% 6413 90% 6413 99% 6413 ms Write: 10% 6863 30% 6863 50% 6863 90% 6863 99% 6863 ms Write: 10% 7544 30% 7544 50% 7544 90% 7544 99% 7544 ms Write: 10% 7390 30% 7390 50% 7390 90% 7390 99% 7390 ms Write: 10% 6134 30% 6134 50% 6134 90% 6134 99% 6134 ms Write: 10% 6187 30% 6187 50% 6187 90% 6187 99% 6187 ms Write: 10% 7011 30% 7011 50% 7011 90% 7011 99% 7011 ms Write: 10% 5611 30% 5611 50% 5611 90% 5611 99% 5611 ms Write: 10% 6218 30% 6218 50% 6218 90% 6218 99% 6218 ms Write: 10% 7542 30% 7542 50% 7542 90% 7542 99% 7542 ms Write: 10% 5957 30% 5957 50% 5957 90% 5957 99% 5957 ms Write: 10% 5523 30% 5523 50% 5523 90% 5523 99% 5523 ms Write: 10% 6209 30% 6209 50% 6209 90% 6209 99% 6209 ms Write: 10% 5569 30% 5569 50% 5569 90% 5569 99% 5569 ms Write: 10% 6057 30% 6057 50% 6057 90% 6057 99% 6057 ms Write: 10% 5232 30% 5232 50% 5232 90% 5232 99% 5232 ms Write: 10% 5228 30% 5228 50% 5228 90% 5228 99% 5228 ms Write: 10% 5880 30% 5880 50% 5880 90% 5880 99% 5880 ms Write: 10% 4995 30% 4995 50% 4995 90% 
4995 99% 4995 ms Write: 10% 4954 30% 4954 50% 4954 90% 4954 99% 4954 ms Write: 10% 4958 30% 4958 50% 4958 90% 4958 99% 4958 ms Write: 10% 4826 30% 4826 50% 4826 90% 4826 99% 4826 ms Write: 10% 4751 30% 4751 50% 4751 90% 4751 99% 4751 ms Write: 10% 4366 30% 4366 50% 4366 90% 4366 99% 4366 ms Write: 10% 5593 30% 5593 50% 5593 90% 5593 99% 5593 ms Write: 10% 5095 30% 5095 50% 5095 90% 5095 99% 5095 ms Write: 10% 4605 30% 4605 50% 4605 90% 4605 99% 4605 ms Write: 10% 4365 30% 4365 50% 4365 90% 4365 99% 4365 ms Write: 10% 4552 30% 4552 50% 4552 90% 4552 99% 4552 ms Write: 10% 4565 30% 4565 50% 4565 90% 4565 99% 4565 ms Write: 10% 4306 30% 4306 50% 4306 90% 4306 99% 4306 ms Write: 10% 4406 30% 4406 50% 4406 90% 4406 99% 4406 ms Write: 10% 6079 30% 6079 50% 6079 90% 6079 99% 6079 ms Write: 10% 4051 30% 4051 50% 4051 90% 4051 99% 4051 ms Write: 10% 4253 30% 4253 50% 4253 90% 4253 99% 4253 ms Write: 10% 4695 30% 4695 50% 4695 90% 4695 99% 4695 ms Write: 10% 3836 30% 3836 50% 3836 90% 3836 99% 3836 ms Write: 10% 4093 30% 4093 50% 4093 90% 4093 99% 4093 ms Write: 10% 4451 30% 4451 50% 4451 90% 4451 99% 4451 ms Write: 10% 4127 30% 4127 50% 4127 90% 4127 99% 4127 ms Write: 10% 4272 30% 4272 50% 4272 90% 4272 99% 4272 ms Write: 10% 4477 30% 4477 50% 4477 90% 4477 99% 4477 ms Update: 10% 1010 30% 1010 50% 1010 90% 1010 99% 1010 ms Step 4. read modify write Write: 10% 3858 30% 3858 50% 3858 90% 3858 99% 3858 ms Write: 10% 8562 30% 8562 50% 8562 90% 8562 99% 8562 ms Write: 10% 8908 30% 8908 50% 8908 90% 8908 99% 8908 ms Write: 10% 9946 30% 9946 50% 9946 90% 9946 99% 9946 ms Write: 10% 7682 30% 7682 50% 7682 90% 7682 99% 7682 ms Write: 10% 7313 30% 7313 50% 7313 90% 7313 99% 7313 ms Write: 10% 8025 30% 8025 50% 8025 90% 8025 99% 8025 ms Write: 10% 8589 30% 8589 50% 8589 90% 8589 99% 8589 ... loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/moto/py3/moto/s3/models.py:122: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). 
File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return 
self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "/home/runner/.ya/build/build_root/mmfs/000020/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 100, in test self._test_suffix(ctx, get_external_param("table_suffix", ""), exit_codes, 0) File "/home/runner/.ya/build/build_root/mmfs/000020/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 110, in _test_suffix ctx.executable(self, ctx) File "ydb/tests/olap/scenario/test_alter_tiering.py", line 356, in scenario_many_tables threads.start_and_wait_all() File "ydb/tests/olap/common/thread_helper.py", line 49, in start_and_wait_all self.join_all() File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout) File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout) File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock() File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Caught base exception, num 0 message Graceful shutdown requested contrib/python/moto/py3/moto/s3/models.py:122: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/python/moto/py3/moto/s3/models.py:122: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/python/moto/py3/moto/s3/models.py:122: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/python/moto/py3/moto/s3/models.py:122: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/python/moto/py3/moto/s3/models.py:122: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/python/moto/py3/moto/s3/models.py:122: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). contrib/python/moto/py3/moto/s3/models.py:122: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). 
Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/mmfs/000020/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/mmfs/000020/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/mmfs/000020/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/mmfs/000020', '--source-root', '/home/runner/.ya/build/build_root/mmfs/000020/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/mmfs/000020/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/mmfs/000020/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/mmfs/000020/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/mmfs/000020/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/mmfs/000020', '--source-root', '/home/runner/.ya/build/build_root/mmfs/000020/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/mmfs/000020/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 
|99.0%| [TM] {RESULT} ydb/tests/olap/scenario/py3test
|99.5%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log}
|99.5%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log}
|99.9%| CLEANING BUILD ROOT

ydb/library/yaml_config/ut_transform [size:medium]
------ sole chunk ran 1 test (total:2.78s - test:2.22s canon:0.32s)
[fail] test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [default-linux-x86_64-relwithdebinfo] (0.90s)
Test results differ from canonical:
    test_result[3]: files content differs: 'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1
    stdout:
    stderr:
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff/test_transform.py.TestYamlConfigTransformations.test_basic.args1-dump_ds_init.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff
------ FAIL: 1 - FAIL ydb/library/yaml_config/ut_transform

ydb/tests/functional/hive [size:medium] nchunks:20
------ [test_drain.py 0/20] chunk ran 1 test (total:22.61s - test:22.51s)
[fail] test_drain.py::TestHive::test_drain_on_stop [default-linux-x86_64-relwithdebinfo] (20.71s)
ydb/tests/functional/hive/test_drain.py:93: in test_drain_on_stop
    wait_tablets_are_active(
ydb/tests/library/common/delayed.py:151: in wait_tablets_are_active
    predicate(raise_error=True)
ydb/tests/library/common/delayed.py:141: in predicate
    raise AssertionError(
E   AssertionError:
E   ##############################
E   0 seconds passed, 71 tablet(s) are not active. Inactive tablets are (first 10 entries): (72075186224037945: 4) (72075186224037962: 4) (72075186224037994: 4) (72075186224038019: 4) (72075186224038027: 4) (72075186224038039: 6) (72075186224038044: 4) (72075186224038067: 4) (72075186224038068: 6) (72075186224038092: 4). Additional info is empty
E   ##############################
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff/test_drain.py.TestHive.test_drain_on_stop.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff
------ FAIL: 1 - FAIL ydb/tests/functional/hive

ydb/tests/functional/serverless [size:medium] nchunks:10
------ [test_serverless.py 0/10] chunk ran 1 test (total:106.63s - test:106.14s)
[fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (103.33s)
ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas
    IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False))
contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync
    return future_cell[0].result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:347: in wrapped
    res = yield func(*args, **kwargs)
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key
    yield tx.async_execute(
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result
    return self.__get_result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result
    raise self._exception
contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback
    response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id
    issues._process_response(response_pb.operation)
contrib/python/ydb/py3/ydb/issues.py:229: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E   ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)

During handling of the above exception, another exception occurred:

ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas
    with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'):
E   AssertionError: Regex pattern did not match.
E   Regex: '.*DISK_SPACE_EXHAUSTED.*'
E   Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
------ [test_serverless.py 1/10] chunk ran 1 test (total:101.86s - test:101.42s)
[fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (98.27s)
ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas
    IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False))
contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync
    return future_cell[0].result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:347: in wrapped
    res = yield func(*args, **kwargs)
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key
    yield tx.async_execute(
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result
    return self.__get_result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result
    raise self._exception
contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback
    response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id
    issues._process_response(response_pb.operation)
contrib/python/ydb/py3/ydb/issues.py:229: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E   ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)

During handling of the above exception, another exception occurred:

ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas
    with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'):
E   AssertionError: Regex pattern did not match.
E   Regex: '.*DISK_SPACE_EXHAUSTED.*'
E   Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
------ FAIL: 2 - FAIL ydb/tests/functional/serverless
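Note on the two serverless failures above: the database does reject the write (ydb.issues.Unavailable, issue_code 2033, "Disk space exhausted"), so the quota itself is enforced; the test then fails only because pytest.raises(..., match=r'.*DISK_SPACE_EXHAUSTED.*') applies the pattern with re.search to the string form of the raised exception, and that string contains only the human-readable message, never a DISK_SPACE_EXHAUSTED token. A self-contained sketch of that pytest mechanism follows; FakeUnavailable and the messages are invented for illustration and are not the YDB SDK or the real test code.

# Self-contained illustration of pytest.raises(match=...): the pattern is
# re.search()-ed against str(exception), so it has to match the message text,
# not an enum-style status name that never appears in that text.
import pytest


class FakeUnavailable(Exception):
    """Stand-in for a server-side error; not a real ydb class."""


def write_row():
    # Message shaped like the one in the log: no 'DISK_SPACE_EXHAUSTED' token.
    raise FakeUnavailable(
        'message: "Disk space exhausted." issue_code: 2033 (server_code: 400050)'
    )


def test_match_on_status_name_fails():
    # The failing pattern from the log: nothing in str(exc) matches it, so
    # pytest reports "Regex pattern did not match." as an AssertionError.
    with pytest.raises(AssertionError, match="Regex pattern did not match"):
        with pytest.raises(FakeUnavailable, match=r".*DISK_SPACE_EXHAUSTED.*"):
            write_row()


def test_match_on_message_text_passes():
    # Matching the actual message text succeeds.
    with pytest.raises(FakeUnavailable, match=r"Disk space exhausted"):
        write_row()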
ydb/tests/functional/tenants [size:medium] nchunks:20
------ [0/20] chunk ran 1 test (total:15.14s - test:15.10s)
[fail] test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (12.21s)
ydb/tests/functional/tenants/test_dynamic_tenants.py:350: in test_create_and_drop_the_same_tenant2
    pool.retry_operation_sync(write_some_data, None, "table", "table_for_rm", value)
contrib/python/ydb/py3/ydb/table.py:2632: in retry_operation_sync
    return retry_operation_sync(wrapped_callee, retry_settings)
contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync
    for next_opt in opt_generator:
contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
contrib/python/ydb/py3/ydb/table.py:2630: in wrapped_callee
    return callee(session, *args, **kwargs)
ydb/tests/functional/tenants/test_dynamic_tenants.py:340: in write_some_data
    session.transaction().execute(
contrib/python/ydb/py3/ydb/table.py:2369: in execute
    return self._driver(
contrib/python/ydb/py3/ydb/tracing.py:70: in wrapper
    return f(self, *args, **kwargs)
contrib/python/ydb/py3/ydb/pool.py:443: in __call__
    res = connection(
contrib/python/ydb/py3/ydb/connection.py:465: in __call__
    return response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id
    issues._process_response(response_pb.operation)
contrib/python/ydb/py3/ydb/issues.py:229: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E   ydb.issues.SchemeError: message: "Failed to create default pool in database /Root/users/database" severity: 1 issues { message: "Failed to create resource pool: StatusPathDoesNotExist" severity: 1 issues { message: "Path does not exist" issue_code: 200200 severity: 1 } } ,message: "Query failed during adding/waiting in workload pool " severity: 1 (server_code: 400070)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--false.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff
------ [1/20] chunk ran 1 test (total:15.32s - test:15.27s)
[fail] test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (12.18s)
ydb/tests/functional/tenants/test_dynamic_tenants.py:350: in test_create_and_drop_the_same_tenant2
    pool.retry_operation_sync(write_some_data, None, "table", "table_for_rm", value)
contrib/python/ydb/py3/ydb/table.py:2632: in retry_operation_sync
    return retry_operation_sync(wrapped_callee, retry_settings)
contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync
    for next_opt in opt_generator:
contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
contrib/python/ydb/py3/ydb/table.py:2630: in wrapped_callee
    return callee(session, *args, **kwargs)
ydb/tests/functional/tenants/test_dynamic_tenants.py:340: in write_some_data
    session.transaction().execute(
contrib/python/ydb/py3/ydb/table.py:2369: in execute
    return self._driver(
contrib/python/ydb/py3/ydb/tracing.py:70: in wrapper
    return f(self, *args, **kwargs)
contrib/python/ydb/py3/ydb/pool.py:443: in __call__
    res = connection(
contrib/python/ydb/py3/ydb/connection.py:465: in __call__
    return response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id
    issues._process_response(response_pb.operation)
contrib/python/ydb/py3/ydb/issues.py:229: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E   ydb.issues.SchemeError: message: "Failed to create default pool in database /Root/users/database" severity: 1 issues { message: "Failed to create resource pool: StatusPathDoesNotExist" severity: 1 issues { message: "Path does not exist" issue_code: 200200 severity: 1 } } ,message: "Query failed during adding/waiting in workload pool " severity: 1 (server_code: 400070)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--true.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff
------ [2/20] chunk ran 1 test (total:16.53s - test:16.49s)
[fail] test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (14.25s)
ydb/tests/functional/tenants/test_tenants.py:433: in test_list_database_above
    assert result.children[0].name == ".sys"
E   AssertionError: assert '.metadata' == '.sys'
E     - .sys
E     + .metadata
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_list_database_above.enable_alter_database_create_hive_first--false.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff
------ [3/20] chunk ran 1 test (total:16.73s - test:16.69s)
[fail] test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (14.08s)
ydb/tests/functional/tenants/test_tenants.py:433: in test_list_database_above
    assert result.children[0].name == ".sys"
E   AssertionError: assert '.metadata' == '.sys'
E     - .sys
E     + .metadata
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_list_database_above.enable_alter_database_create_hive_first--true.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff
------ [4/20] chunk ran 1 test (total:76.13s - test:76.09s)
[fail] test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (73.50s)
ydb/tests/functional/tenants/test_tenants.py:297: in test_stop_start
    with ydb_database_ctx(ydb_cluster, database_path):
contrib/tools/python3/Lib/contextlib.py:137: in __enter__
    return next(self.gen)
ydb/tests/library/fixtures/__init__.py:88: in ydb_database_ctx
    ydb_cluster.create_database(database_path, storage_pool_units_count=storage_pools, timeout_seconds=timeout_seconds, token=token)
ydb/tests/library/harness/kikimr_cluster_interface.py:217: in create_database
    raise RuntimeError('create_database failed: %s, %s' % (operation.status, ydb.issues._format_issues(operation.issues)))
E   RuntimeError: create_database failed: 400080, message: "Group fit error BoxId# 1 StoragePoolId# 5 Error# failed to allocate group: no group options PDisks# {[(1:1-S)]}" severity: 1
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--false.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff
------ [5/20] chunk ran 1 test (total:76.13s - setup:0.01s test:76.08s)
[fail] test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (73.36s)
ydb/tests/functional/tenants/test_tenants.py:297: in test_stop_start
    with ydb_database_ctx(ydb_cluster, database_path):
contrib/tools/python3/Lib/contextlib.py:137: in __enter__
    return next(self.gen)
ydb/tests/library/fixtures/__init__.py:88: in ydb_database_ctx
    ydb_cluster.create_database(database_path, storage_pool_units_count=storage_pools, timeout_seconds=timeout_seconds, token=token)
ydb/tests/library/harness/kikimr_cluster_interface.py:217: in create_database
    raise RuntimeError('create_database failed: %s, %s' % (operation.status, ydb.issues._format_issues(operation.issues)))
E   RuntimeError: create_database failed: 400080, message: "Group fit error BoxId# 1 StoragePoolId# 5 Error# failed to allocate group: no group options PDisks# {[(1:1-S)]}" severity: 1
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--true.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff
------ FAIL: 6 - FAIL ydb/tests/functional/tenants

ydb/tests/olap/data_quotas [size:medium] nchunks:10
------ [0/10] chunk ran 1 test (total:226.79s - test:226.72s)
[fail] test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [default-linux-x86_64-relwithdebinfo] (224.53s)
ydb/tests/olap/data_quotas/test_quota_exhaustion.py:236: in test_duplicates
    self.upsert_until_overload(lambda i: self.upsert_test_chunk(session, table_path, 0, retries=0), timeout_seconds=200)
ydb/tests/olap/data_quotas/test_quota_exhaustion.py:83: in upsert_until_overload
    assert time.time() <= deadline, "deadline exceeded"
E   AssertionError: deadline exceeded
E   assert 1746418450.4814816 <= 1746418448.122969
E    +  where 1746418450.4814816 = <built-in function time>()
E    +    where <built-in function time> = time.time
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/test_quota_exhaustion.py.TestYdbWorkload.test_duplicates.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff
------ FAIL: 1 - FAIL ydb/tests/olap/data_quotas
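Note on the data_quotas failure above: the assertion comes from an upsert-until-overload helper that keeps writing until the database reports an overload, under a hard wall-clock deadline (timeout_seconds=200 here); in this run the deadline fired roughly 2.4 s before the expected overload arrived. The sketch below shows the general shape of such a deadline loop. It is an illustration only, not the actual upsert_until_overload from ydb/tests/olap/data_quotas; OverloadError and repeat_until_overload are invented names.

# Generic "repeat an action until it is rejected, but give up at a deadline"
# loop, shaped like the helper referenced in the traceback above.
import time


class OverloadError(Exception):
    """Stand-in for the 'database is overloaded / out of quota' rejection."""


def repeat_until_overload(action, timeout_seconds=200, pause=0.5):
    deadline = time.time() + timeout_seconds
    attempt = 0
    while True:
        # Fail loudly if the expected overload never arrives in time; this is
        # the "deadline exceeded" assertion seen in the test output.
        assert time.time() <= deadline, "deadline exceeded"
        try:
            action(attempt)  # e.g. one upsert of a test chunk
        except OverloadError:
            return attempt  # the quota kicked in, as the test expects
        attempt += 1
        time.sleep(pause)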
ydb/tests/olap/scenario [size:medium]
------ sole chunk ran 18 tests (total:628.48s - test:600.08s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
    test_alter_compression.py::TestAlterCompression::test[alter_compression] (good) duration: 169.59s
    test_alter_tiering.py::TestAlterTiering::test[many_tables] (timeout) duration: 140.22s
    test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (good) duration: 93.87s
    test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] (good) duration: 81.44s
    test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] (good) duration: 71.97s
    test_simple.py::TestSimple::test[tablestores] (good) duration: 27.61s
    test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] (good) duration: 27.09s
    test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] (good) duration: 2.32s
    test_simple.py::TestSimple::test_multi[alter_table] (good) duration: 2.25s
    test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] (good) duration: 2.22s
    8 more tests with 6.04s total duration are not listed.
Info: Test run has exceeded 8.0G (8388608K) memory limit with 8.1G (8506952K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
     pid    rss    ref  pdirt
 1792872  44.8M  44.8M   6.6M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 1793951  33.6M  22.1M   9.5M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 1794019   6.2G   6.3G   6.2G     └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte
 1814598   1.7G   1.7G   1.5G        └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/te
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr
[timeout] test_alter_tiering.py::TestAlterTiering::test[many_tables] [default-linux-x86_64-relwithdebinfo] (140.22s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_tiering.py.TestAlterTiering.test.many_tables.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
------ TIMEOUT: 17 - GOOD, 1 - TIMEOUT ydb/tests/olap/scenario

ydb/tests/olap/ttl_tiering [size:medium] nchunks:10
------ [1/10] chunk ran 1 test (total:614.19s - test:600.05s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
    data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 612.02s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr
[timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-relwithdebinfo] (612.02s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
------ TIMEOUT: 7 - GOOD, 1 - TIMEOUT ydb/tests/olap/ttl_tiering

------ sole chunk ran 1 test (total:230.10s - test:229.62s)
Info: Test run has exceeded 32.0G (33554432K) memory limit with 88.4G (92743732K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
     pid    rss    ref  pdirt
 1792990  44.8M  44.8M   6.2M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 1794011  32.7M  20.7M   8.2M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 1794054   264M   266M   214M     └─ ydb-tests-stress-olap_workload-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:f
 1795259  10.4G  10.1G  10.2G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
 1795260  10.1G  10.0G   9.8G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
 1795261   9.7G   9.3G   9.4G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
 1795262  10.1G   9.8G   9.8G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
 1795263   9.9G   9.6G   9.7G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
 1795264   9.7G   9.7G   9.5G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
 1795265   9.7G   9.6G   9.4G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
 1795266  10.0G  10.0G   9.8G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
 1795267   9.7G   9.7G   9.5G        └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/stderr

------ sole chunk ran 2 tests (total:238.47s - test:236.39s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 11.4G (11942840K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
     pid    rss    ref  pdirt
 1792826  44.8M  44.8M   6.2M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 1793932  32.6M  20.7M   8.1M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 1793989   1.3G   1.3G   1.3G     └─ ydb-tests-stress-simple_queue-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:fa
 1796141   1.1G   1.1G   922M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_
 1796155   1.0G   1.0G   848M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_
 1796213   1.1G   1.1G   876M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_
 1796270   1.2G   1.2G   980M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_
 1796278   1.2G   1.2G   1.0G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_
 1796286   1.1G   1.1G   948M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_
 1796307   1.1G   1.1G   845M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_
 1796361  1008M  1001M   785M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_
 1796438   1.1G   1.1G   907M        └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_out_stuff/stderr
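Note on the pid/rss/ref/pdirt trees above: they are the test tool's accounting of which processes, mostly the ydbd server instances, pushed these chunks past their 8 G and 32 G memory limits; the log's own remedy is to raise the declared requirement via REQUIREMENTS(ram:X) in the suite's ya.make. For comparison, a rough way to snapshot the RSS of a running test's process tree is sketched below with psutil. This is only an illustration of the idea, not the test_tool's reporting code; rss_tree and check_limit are invented names, and the 8 GiB threshold simply mirrors the limit quoted above.

# Rough equivalent of the per-process RSS table printed by the test tool.
# Illustrative only (uses psutil, not the test tool's internal accounting).
import psutil

LIMIT_BYTES = 8 * 1024 ** 3  # 8.0G limit quoted in the log


def rss_tree(root_pid):
    """Return (total_rss, [(pid, rss, cmdline), ...]) for a process tree."""
    root = psutil.Process(root_pid)
    rows, total = [], 0
    for p in [root] + root.children(recursive=True):
        try:
            rss = p.memory_info().rss
            rows.append((p.pid, rss, " ".join(p.cmdline())[:80]))
            total += rss
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            continue  # child exited (or is restricted) while walking the tree
    return total, rows


def check_limit(root_pid):
    total, rows = rss_tree(root_pid)
    for pid, rss, cmd in rows:
        print(f"{pid:>8} {rss / 2**30:6.1f}G  {cmd}")
    if total > LIMIT_BYTES:
        print(f"memory limit exceeded: {total / 2**30:.1f}G used")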
Total 9 suites: 2 - GOOD, 5 - FAIL, 2 - TIMEOUT
Total 40 tests: 27 - GOOD, 11 - FAIL, 2 - TIMEOUT
Cache efficiency ratio is 99.78% (35496 of 35575). Local: 39 (0.11%), dist: 0 (0.00%), by dynamic uids: 0 (0.00%), avoided: 35457 (99.67%)
Dist cache download: count=0, size=0 bytes, speed=0.0 bytes/s
Disk usage for tools/sdk 3.33 GiB
Additional disk space consumed for build cache 0 bytes
Critical path:
[614768 ms] [TM] [rnd-1885414392543078807 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 0 (1746418240542), finished: 614768 (1746418855310)]
[ 20867 ms] [TA] [rnd-g4a723m5mc382bef]: $(BUILD_ROOT)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} [started: 614782 (1746418855324), finished: 635649 (1746418876191)]
Time from start: 644276.5041503906 ms, time elapsed by graph 635635 ms, time diff 8641.504150390625 ms.
The longest 10 tasks:
[628874 ms] [TM] [rnd-0yyp4c29enpf97l4 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/scenario/py3test [started: 1746418243603, finished: 1746418872477]
[614768 ms] [TM] [rnd-1885414392543078807 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746418240542, finished: 1746418855310]
[384402 ms] [TM] [rnd-7346958705883106101 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746418240540, finished: 1746418624942]
[238872 ms] [TM] [rnd-dmgjm56t0usrvjqo default-linux-x86_64 relwithdebinfo]: ydb/tests/stress/simple_queue/tests/py3test [started: 1746418243590, finished: 1746418482462]
[230503 ms] [TM] [rnd-dr03ybceps8763sd default-linux-x86_64 relwithdebinfo]: ydb/tests/stress/olap_workload/tests/py3test [started: 1746418243641, finished: 1746418474144]
[227386 ms] [TM] [rnd-937366423579286764 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/data_quotas/py3test [started: 1746418240550, finished: 1746418467936]
[198022 ms] [TM] [rnd-7106574339642039639 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746418240542, finished: 1746418438564]
[197486 ms] [TM] [rnd-344250548490109393 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746418240534, finished: 1746418438020]
[148143 ms] [TM] [rnd-5553734489523990655 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746418240540, finished: 1746418388683]
[140864 ms] [TM] [rnd-14372840053932445855 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746418240659, finished: 1746418381523]
Total time by type:
[3710238 ms] [TM] [count: 74, ave time 50138.35 msec]
[  24509 ms] [TA] [count: 5, ave time 4901.80 msec]
[   3920 ms] [prepare:bazel-store] [count: 1, ave time 3920.00 msec]
[   3878 ms] [prepare:get from local cache] [count: 39, ave time 99.44 msec]
[   1378 ms] [prepare:tools] [count: 12, ave time 114.83 msec]
[   1371 ms] [prepare:AC] [count: 2, ave time 685.50 msec]
[    535 ms] [prepare:put to dist cache] [count: 25, ave time 21.40 msec]
[     84 ms] [prepare:clean] [count: 3, ave time 28.00 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 3734747 ms (100.00%)
Total run tasks time - 3734747 ms
Configure time - 16.4 s
Statistics overhead 783 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/report.json
Ok
+ echo 0